var/home/core/zuul-output/0000755000175000017500000000000015071263137014532 5ustar corecorevar/home/core/zuul-output/logs/0000755000175000017500000000000015071275566015507 5ustar corecorevar/home/core/zuul-output/logs/kubelet.log0000644000000000000000005062470715071275556017725 0ustar rootrootOct 07 19:17:52 crc systemd[1]: Starting Kubernetes Kubelet... Oct 07 19:17:52 crc restorecon[4558]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 07 19:17:52 crc restorecon[4558]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:52 crc restorecon[4558]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 07 19:17:53 crc restorecon[4558]: 
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc 
restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc 
restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 
crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 
19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 07 19:17:53 crc restorecon[4558]: 
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 
19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 07 19:17:53 crc 
restorecon[4558]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 
19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 
19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc 
restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 07 19:17:53 crc restorecon[4558]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Oct 07 19:17:54 crc kubenswrapper[4813]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 07 19:17:54 crc kubenswrapper[4813]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Oct 07 19:17:54 crc kubenswrapper[4813]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 07 19:17:54 crc kubenswrapper[4813]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 07 19:17:54 crc kubenswrapper[4813]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 07 19:17:54 crc kubenswrapper[4813]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.384229 4813 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394198 4813 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394233 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394244 4813 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394253 4813 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394263 4813 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394273 4813 feature_gate.go:330] unrecognized feature gate: Example
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394281 4813 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394290 4813 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394301 4813 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394311 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394318 4813 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394349 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394357 4813 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394365 4813 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394373 4813 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394380 4813 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394391 4813 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394400 4813 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394409 4813 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394417 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394426 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394434 4813 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394443 4813 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394458 4813 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394466 4813 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394477 4813 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394485 4813 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394493 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394500 4813 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394508 4813 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394515 4813 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394523 4813 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394531 4813 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394538 4813 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394545 4813 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394556 4813 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394565 4813 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394574 4813 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394583 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394591 4813 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394601 4813 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394612 4813 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394621 4813 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394631 4813 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394639 4813 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394647 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394654 4813 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394662 4813 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394670 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394678 4813 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394685 4813 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394693 4813 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394702 4813 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394709 4813 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394717 4813 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394724 4813 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394732 4813 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394739 4813 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394747 4813 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394754 4813 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394762 4813 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394770 4813 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394777 4813 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394816 4813 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394824 4813 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394832 4813 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394839 4813 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394846 4813 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394854 4813 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394862 4813 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.394870 4813 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.395908 4813 flags.go:64] FLAG: --address="0.0.0.0"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.395930 4813 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.395953 4813 flags.go:64] FLAG: --anonymous-auth="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.395964 4813 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.395979 4813 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.395988 4813 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396000 4813 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396011 4813 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396021 4813 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396030 4813 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396039 4813 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396061 4813 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396071 4813 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396080 4813 flags.go:64] FLAG: --cgroup-root=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396089 4813 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396098 4813 flags.go:64] FLAG: --client-ca-file=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396108 4813 flags.go:64] FLAG: --cloud-config=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396117 4813 flags.go:64] FLAG: --cloud-provider=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396125 4813 flags.go:64] FLAG: --cluster-dns="[]"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396142 4813 flags.go:64] FLAG: --cluster-domain=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396150 4813 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396159 4813 flags.go:64] FLAG: --config-dir=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396168 4813 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396178 4813 flags.go:64] FLAG: --container-log-max-files="5"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396188 4813 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396197 4813 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396206 4813 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396215 4813 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396225 4813 flags.go:64] FLAG: --contention-profiling="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396235 4813 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396244 4813 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396253 4813 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396262 4813 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396273 4813 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396282 4813 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396290 4813 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396299 4813 flags.go:64] FLAG: --enable-load-reader="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396308 4813 flags.go:64] FLAG: --enable-server="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396317 4813 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396377 4813 flags.go:64] FLAG: --event-burst="100"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396388 4813 flags.go:64] FLAG: --event-qps="50"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396397 4813 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396406 4813 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396416 4813 flags.go:64] FLAG: --eviction-hard=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396427 4813 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396436 4813 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396445 4813 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396471 4813 flags.go:64] FLAG: --eviction-soft=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396480 4813 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396489 4813 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396498 4813 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396507 4813 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396516 4813 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396525 4813 flags.go:64] FLAG: --fail-swap-on="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396535 4813 flags.go:64] FLAG: --feature-gates=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396546 4813 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396556 4813 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396565 4813 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396574 4813 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396583 4813 flags.go:64] FLAG: --healthz-port="10248"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396593 4813 flags.go:64] FLAG: --help="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396602 4813 flags.go:64] FLAG: --hostname-override=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396611 4813 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396620 4813 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396629 4813 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396638 4813 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396647 4813 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396656 4813 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396664 4813 flags.go:64] FLAG: --image-service-endpoint=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396673 4813 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396682 4813 flags.go:64] FLAG: --kube-api-burst="100"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396691 4813 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396701 4813 flags.go:64] FLAG: --kube-api-qps="50"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396710 4813 flags.go:64] FLAG: --kube-reserved=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396719 4813 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396727 4813 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396736 4813 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396745 4813 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396754 4813 flags.go:64] FLAG: --lock-file=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396763 4813 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396772 4813 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396781 4813 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396794 4813 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396814 4813 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396823 4813 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396832 4813 flags.go:64] FLAG: --logging-format="text"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396841 4813 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396851 4813 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396860 4813 flags.go:64] FLAG: --manifest-url=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396868 4813 flags.go:64] FLAG: --manifest-url-header=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396879 4813 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396889 4813 flags.go:64] FLAG: --max-open-files="1000000"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396901 4813 flags.go:64] FLAG: --max-pods="110"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396910 4813 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396919 4813 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396928 4813 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396937 4813 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396946 4813 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396954 4813 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396963 4813 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396982 4813 flags.go:64] FLAG: --node-status-max-images="50"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.396991 4813 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397000 4813 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397009 4813 flags.go:64] FLAG: --pod-cidr=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397018 4813 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397036 4813 flags.go:64] FLAG: --pod-manifest-path=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397046 4813 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397055 4813 flags.go:64] FLAG: --pods-per-core="0"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397064 4813 flags.go:64] FLAG: --port="10250"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397072 4813 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397081 4813 flags.go:64] FLAG: --provider-id=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397090 4813 flags.go:64] FLAG: --qos-reserved=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397099 4813 flags.go:64] FLAG: --read-only-port="10255"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397108 4813 flags.go:64] FLAG: --register-node="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397117 4813 flags.go:64] FLAG: --register-schedulable="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397126 4813 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397140 4813 flags.go:64] FLAG: --registry-burst="10"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397150 4813 flags.go:64] FLAG: --registry-qps="5"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397158 4813 flags.go:64] FLAG: --reserved-cpus=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397178 4813 flags.go:64] FLAG: --reserved-memory=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397189 4813 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397198 4813 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397207 4813 flags.go:64] FLAG: --rotate-certificates="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397217 4813 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397225 4813 flags.go:64] FLAG: --runonce="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397234 4813 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397243 4813 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397253 4813 flags.go:64] FLAG: --seccomp-default="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397261 4813 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397270 4813 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397280 4813 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397288 4813 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397298 4813 flags.go:64] FLAG: --storage-driver-password="root"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397306 4813 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397315 4813 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397357 4813 flags.go:64] FLAG: --storage-driver-user="root"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397366 4813 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397375 4813 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397384 4813 flags.go:64] FLAG: --system-cgroups=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397393 4813 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397408 4813 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397417 4813 flags.go:64] FLAG: --tls-cert-file=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397426 4813 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397444 4813 flags.go:64] FLAG: --tls-min-version=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397453 4813 flags.go:64] FLAG: --tls-private-key-file=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397463 4813 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397472 4813 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397482 4813 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397491 4813 flags.go:64] FLAG: --v="2"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397503 4813 flags.go:64] FLAG: --version="false"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397514 4813 flags.go:64] FLAG: --vmodule=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397524 4813 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.397534 4813 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
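The flag dump above closes out the command-line parse. Note that --system-reserved, --register-with-taints, --volume-plugin-dir and --container-runtime-endpoint all carry live values here even though the deprecation warnings at startup say they belong in the file passed via --config (/etc/kubernetes/kubelet.conf on this node). As a minimal sketch of what the equivalent KubeletConfiguration stanza could look like, assuming the public k8s.io/kubelet/config/v1beta1 types and sigs.k8s.io/yaml (this is only the subset of fields flagged above, not this cluster's actual rendered config):

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        kubeletv1beta1 "k8s.io/kubelet/config/v1beta1"
        "sigs.k8s.io/yaml"
    )

    func main() {
        // Values copied from the FLAG dump above; field names are from the
        // upstream KubeletConfiguration type.
        cfg := kubeletv1beta1.KubeletConfiguration{
            TypeMeta: metav1.TypeMeta{
                APIVersion: "kubelet.config.k8s.io/v1beta1",
                Kind:       "KubeletConfiguration",
            },
            ContainerRuntimeEndpoint: "/var/run/crio/crio.sock",
            VolumePluginDir:          "/etc/kubernetes/kubelet-plugins/volume/exec",
            SystemReserved: map[string]string{
                "cpu":               "200m",
                "memory":            "350Mi",
                "ephemeral-storage": "350Mi",
            },
            RegisterWithTaints: []corev1.Taint{
                {Key: "node-role.kubernetes.io/master", Effect: corev1.TaintEffectNoSchedule},
            },
        }
        out, err := yaml.Marshal(&cfg)
        if err != nil {
            panic(err)
        }
        fmt.Print(string(out)) // YAML of the kind the kubelet reads from --config
    }

When both are present, kubelet command-line flags take precedence over the config file, which is consistent with the flag values still being authoritative in this dump.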
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.398386 4813 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.410072 4813 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.410142 4813 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
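Each "unrecognized feature gate" warning above names an OpenShift-specific gate that the embedded Kubernetes feature-gate registry has no registration for; only the recognized subset survives into the final "feature gates: {map[...]}" line, with KMSv1 and the GA gates drawing their own deprecation notes. A minimal sketch of that register/set/query flow using k8s.io/component-base/featuregate, the same package these log lines come from. The downgrade of unknown gate names from an error to a warning is evidently behavior of this kubenswrapper build (an assumption here; stock component-base returns an error), and NotARealGate is a made-up name:

    package main

    import (
        "fmt"

        "k8s.io/component-base/featuregate"
    )

    func main() {
        // A mutable gate registry, as component-base provides it.
        gates := featuregate.NewFeatureGate()

        // Register the gates this binary actually knows about. KMSv1 is
        // registered as Deprecated, matching feature_gate.go:351 above.
        if err := gates.Add(map[featuregate.Feature]featuregate.FeatureSpec{
            "CloudDualStackNodeIPs": {Default: true, PreRelease: featuregate.GA},
            "KMSv1":                 {Default: false, PreRelease: featuregate.Deprecated},
        }); err != nil {
            panic(err)
        }

        // Stock component-base rejects names that were never registered;
        // this build logs "unrecognized feature gate" warnings instead.
        if err := gates.SetFromMap(map[string]bool{
            "KMSv1":        true,
            "NotARealGate": true, // hypothetical, stands in for the OpenShift-only names
        }); err != nil {
            fmt.Println("set error:", err)
        }

        fmt.Println("KMSv1 enabled:", gates.Enabled("KMSv1"))
    }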
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.412195 4813 server.go:940] "Client rotation is on, will bootstrap in background"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.417794 4813 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.417929 4813 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.419649 4813 server.go:997] "Starting client certificate rotation"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.419703 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.420031 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-23 02:17:48.988952317 +0000 UTC
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.420116 4813 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1110h59m54.568838303s for next certificate rotation
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.445603 4813 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.447241 4813 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.469121 4813 log.go:25] "Validated CRI v1 runtime API"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.504687 4813 log.go:25] "Validated CRI v1 image API"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.505865 4813 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.510518 4813 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-07-19-12-33-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.510541 4813 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
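The certificate_manager lines above condense the whole client-cert lifecycle: the current pair is still valid, expiry is 2026-02-24 05:52:08 UTC, the manager picked 2025-11-23 02:17:48 UTC as its rotation deadline, and the 1110h59m54s wait is simply deadline minus now. A small sketch of that arithmetic, assuming (in the spirit of client-go's certificate manager, whose exact jitter policy is not re-derived here) a uniformly jittered deadline in roughly the back 70% to 90% stretch of the certificate's validity, and assuming a one-year certificate; both are illustration-only assumptions:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // nextRotationDeadline mimics the idea behind client-go's certificate
    // manager: rotate at a random point late in the cert's lifetime so a
    // fleet of kubelets does not stampede the CA all at once. The 70%-90%
    // window is an assumption for illustration.
    func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        // Expiry is from the log; notBefore assumes a one-year certificate.
        notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC)
        notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)

        deadline := nextRotationDeadline(notBefore, notAfter)
        fmt.Println("rotation deadline:", deadline)

        // The log's "Waiting 1110h59m54s..." is deadline minus the boot time.
        now := time.Date(2025, 10, 7, 19, 17, 54, 0, time.UTC)
        fmt.Println("sleep:", deadline.Sub(now))
    }

The logged deadline sits about 75% of the way through that assumed one-year window, which is consistent with a jitter band of this shape.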
kubenswrapper[4813]: I1007 19:17:54.520371 4813 manager.go:217] Machine: {Timestamp:2025-10-07 19:17:54.518264145 +0000 UTC m=+0.596519776 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199464448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:2045744e-c123-49af-abd0-32768e95c3cd BootID:a5d0af03-2f8a-4e5c-98e8-1fba456b2042 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076106 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599730176 Type:vfs Inodes:3076106 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039894528 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599734272 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:d1:ee:96 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:d1:ee:96 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:25:64:65 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:c1:60:94 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:a8:cc:93 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:83:81:e3 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:d6:e4:8b:2a:af:c2 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:4e:42:9e:58:92:94 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199464448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 
Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.520518 4813 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.520638 4813 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.521414 4813 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.521555 4813 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.521581 4813 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.521727 4813 topology_manager.go:138] "Creating topology manager with none policy" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.521736 4813 container_manager_linux.go:303] "Creating device plugin manager" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.522149 4813 manager.go:142] "Creating Device Plugin manager" 
path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.522173 4813 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.522896 4813 state_mem.go:36] "Initialized new in-memory state store" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.522971 4813 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.529970 4813 kubelet.go:418] "Attempting to sync node with API server" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.530054 4813 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.530153 4813 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.530169 4813 kubelet.go:324] "Adding apiserver pod source" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.530184 4813 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.535832 4813 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.536564 4813 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.538756 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.538855 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.538775 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.538962 4813 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.539006 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540515 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540599 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540651 4813 
plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540708 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540766 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540815 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540876 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540936 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.540988 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.541037 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.541102 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.541153 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.541988 4813 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.542445 4813 server.go:1280] "Started kubelet" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.543207 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.543542 4813 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 07 19:17:54 crc systemd[1]: Started Kubernetes Kubelet. 
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.544469 4813 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.544760 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.544792 4813 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.544940 4813 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.544944 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 12:15:11.527367055 +0000 UTC
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.545092 4813 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1216h57m16.982279693s for next certificate rotation
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.545202 4813 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.545219 4813 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.545287 4813 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.548489 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.550847 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.550921 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.556056 4813 factory.go:55] Registering systemd factory
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.556078 4813 factory.go:221] Registration of the systemd container factory successfully
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.556502 4813 factory.go:153] Registering CRI-O factory
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.556585 4813 factory.go:221] Registration of the crio container factory successfully
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.556702 4813 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.556780 4813 factory.go:103] Registering Raw factory
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.556859 4813 manager.go:1196] Started watching for new ooms in manager
Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.556963 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="200ms"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.557520 4813 manager.go:319] Starting recovery of all containers
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.560242 4813 server.go:460] "Adding debug handlers to kubelet server"
Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.554627 4813 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.224:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186c4b98eadc079c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-07 19:17:54.542421916 +0000 UTC m=+0.620677527,LastTimestamp:2025-10-07 19:17:54.542421916 +0000 UTC m=+0.620677527,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.563987 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564033 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564050 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564064 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564079 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564097 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564113 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564130 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564150 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564162 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564173 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564186 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564198 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564214 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564228 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564240 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564254 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564265 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564279 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564291 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564302 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564314 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564419 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564440 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564455 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564470 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564491 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564509 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564524 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564538 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564554 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564572 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564588 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564603 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564620 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564635 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564647 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564658 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564670 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564682 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564694 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564708 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564720 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564732 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564745 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564757 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564771 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564784 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564796 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564809 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564823 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564835 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564851 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564866 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564879 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564893 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564906 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564919 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564932 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564945 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564957 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564969 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564982 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.564995 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565007 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565020 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565032 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565044 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565056 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565070 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565083 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565095 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565108 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565119 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565131 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565143 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565156 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565167 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565180 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565200 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565213 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565226 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565238 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565250 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565261 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565274 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565287 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565301 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565313 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565347 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565360 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565374 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565386 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565398 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565410 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565422 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565435 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565447 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565460 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565475 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565491 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565507 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565523 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565538 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565563 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565579 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565592 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565606 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565619 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565632 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565645 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565659 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565672 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565686 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565698 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565710 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565722 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565734 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565747 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565772 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565787 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565814 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565834 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565851 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565866 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565884 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565906 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565924 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565938 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565956 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565971 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.565989 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566004 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566019 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566033 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566045 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566059 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566071 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566082 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566095 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566107 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566120 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566140 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566152 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566163 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566176 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566187 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566199 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566210 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566222 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566234 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28"
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566246 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566257 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566269 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566282 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566294 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566306 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566317 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566349 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566362 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.566374 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.570851 4813 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" 
volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.571630 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572277 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572296 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572357 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572368 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572376 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572386 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572394 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572403 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572413 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572421 4813 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572430 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572438 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572449 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572458 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572467 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572477 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572488 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572499 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572510 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572519 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572528 4813 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572537 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572547 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572556 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572565 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572574 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572582 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572590 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572599 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572634 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572643 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572653 4813 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572662 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572671 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572682 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572690 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572700 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572713 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572725 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572735 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572748 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572757 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572767 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572775 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572784 4813 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572793 4813 reconstruct.go:97] "Volume reconstruction finished" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.572799 4813 reconciler.go:26] "Reconciler: start to sync state" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.580297 4813 manager.go:324] Recovery completed Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.593882 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.596196 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.596244 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.596257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.597116 4813 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.597128 4813 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.597146 4813 state_mem.go:36] "Initialized new in-memory state store" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.599570 4813 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.601222 4813 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.601262 4813 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.601286 4813 kubelet.go:2335] "Starting kubelet main sync loop" Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.601346 4813 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 07 19:17:54 crc kubenswrapper[4813]: W1007 19:17:54.605050 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.605122 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.618958 4813 policy_none.go:49] "None policy: Start" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.620365 4813 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.620387 4813 state_mem.go:35] "Initializing new in-memory state store" Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.649050 4813 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.674484 4813 manager.go:334] "Starting Device Plugin manager" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.675147 4813 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.675172 4813 server.go:79] "Starting device plugin registration server" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.675826 4813 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.675852 4813 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.676261 4813 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.676395 4813 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.676433 4813 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.683062 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.702277 4813 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 07 19:17:54 crc kubenswrapper[4813]: 
I1007 19:17:54.702382 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.703977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.704020 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.704031 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.704180 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.704366 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.704407 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705049 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705069 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705049 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705081 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705094 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705103 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705215 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705422 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.705453 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706049 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706067 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706075 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706146 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706160 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706278 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706306 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706162 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706512 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.706985 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.707004 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.707012 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.707114 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.707475 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.707504 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.708770 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.708795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.708809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.709012 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.709032 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.709040 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.709046 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.709051 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.709059 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.709193 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.709218 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.711187 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.711217 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.711229 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.757910 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="400ms" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775670 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775706 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775728 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775749 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775810 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775849 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775872 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775888 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775914 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775948 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.775983 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.776002 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.776034 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.776049 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.776062 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.776396 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: 
I1007 19:17:54.777177 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.777205 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.777225 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.777260 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.777804 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877497 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877566 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877598 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877628 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877664 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877691 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877719 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877748 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" 
(UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877778 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877775 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877822 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877775 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877806 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877861 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877774 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877919 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877881 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877882 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.877998 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878043 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878028 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878016 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878094 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878125 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878160 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878190 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878199 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 07 19:17:54 crc 
kubenswrapper[4813]: I1007 19:17:54.878074 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878300 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.878396 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.977899 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.980060 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.980101 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.980112 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:54 crc kubenswrapper[4813]: I1007 19:17:54.980136 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 19:17:54 crc kubenswrapper[4813]: E1007 19:17:54.980707 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.045238 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.052616 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.068279 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.084643 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.088724 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:55 crc kubenswrapper[4813]: W1007 19:17:55.103417 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-eaae094ebf2d1d6273027097787b3e0e8536e40e12f1181d282b9b3df441e369 WatchSource:0}: Error finding container eaae094ebf2d1d6273027097787b3e0e8536e40e12f1181d282b9b3df441e369: Status 404 returned error can't find the container with id eaae094ebf2d1d6273027097787b3e0e8536e40e12f1181d282b9b3df441e369 Oct 07 19:17:55 crc kubenswrapper[4813]: W1007 19:17:55.104489 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-dc8a22904b60093b787df7ff5c8170dfbfb28059252e8bbfc738958d0c5cdde8 WatchSource:0}: Error finding container dc8a22904b60093b787df7ff5c8170dfbfb28059252e8bbfc738958d0c5cdde8: Status 404 returned error can't find the container with id dc8a22904b60093b787df7ff5c8170dfbfb28059252e8bbfc738958d0c5cdde8 Oct 07 19:17:55 crc kubenswrapper[4813]: W1007 19:17:55.118505 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-92a1b570cd6c208ecd116be238b09b4402676c4f865a3d94b18e4370c3282cbb WatchSource:0}: Error finding container 92a1b570cd6c208ecd116be238b09b4402676c4f865a3d94b18e4370c3282cbb: Status 404 returned error can't find the container with id 92a1b570cd6c208ecd116be238b09b4402676c4f865a3d94b18e4370c3282cbb Oct 07 19:17:55 crc kubenswrapper[4813]: W1007 19:17:55.122083 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-20b6f9108e07bbe22c7ac0eca612c0eb59bbd5e12e18a9f3424f8cf1e6986a39 WatchSource:0}: Error finding container 20b6f9108e07bbe22c7ac0eca612c0eb59bbd5e12e18a9f3424f8cf1e6986a39: Status 404 returned error can't find the container with id 20b6f9108e07bbe22c7ac0eca612c0eb59bbd5e12e18a9f3424f8cf1e6986a39 Oct 07 19:17:55 crc kubenswrapper[4813]: W1007 19:17:55.123385 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c52b566270f6b3cb65e184970f2b0f36a978e769190d99c7b7686d55f291c379 WatchSource:0}: Error finding container c52b566270f6b3cb65e184970f2b0f36a978e769190d99c7b7686d55f291c379: Status 404 returned error can't find the container with id c52b566270f6b3cb65e184970f2b0f36a978e769190d99c7b7686d55f291c379 Oct 07 19:17:55 crc kubenswrapper[4813]: E1007 19:17:55.159612 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="800ms" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.380991 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.382643 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.382670 4813 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.382678 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.382700 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 19:17:55 crc kubenswrapper[4813]: E1007 19:17:55.383074 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.544826 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.608726 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"20b6f9108e07bbe22c7ac0eca612c0eb59bbd5e12e18a9f3424f8cf1e6986a39"} Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.609509 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"92a1b570cd6c208ecd116be238b09b4402676c4f865a3d94b18e4370c3282cbb"} Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.610359 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dc8a22904b60093b787df7ff5c8170dfbfb28059252e8bbfc738958d0c5cdde8"} Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.611197 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"eaae094ebf2d1d6273027097787b3e0e8536e40e12f1181d282b9b3df441e369"} Oct 07 19:17:55 crc kubenswrapper[4813]: I1007 19:17:55.611917 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c52b566270f6b3cb65e184970f2b0f36a978e769190d99c7b7686d55f291c379"} Oct 07 19:17:55 crc kubenswrapper[4813]: W1007 19:17:55.743051 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:55 crc kubenswrapper[4813]: E1007 19:17:55.743128 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:55 crc kubenswrapper[4813]: W1007 19:17:55.777099 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:55 crc kubenswrapper[4813]: E1007 19:17:55.777159 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:55 crc kubenswrapper[4813]: W1007 19:17:55.806347 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:55 crc kubenswrapper[4813]: E1007 19:17:55.806425 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:55 crc kubenswrapper[4813]: E1007 19:17:55.961548 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="1.6s" Oct 07 19:17:56 crc kubenswrapper[4813]: W1007 19:17:56.102570 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:56 crc kubenswrapper[4813]: E1007 19:17:56.102756 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.183544 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.185046 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.185090 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.185102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.185132 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 19:17:56 crc kubenswrapper[4813]: E1007 19:17:56.186573 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.544697 4813 csi_plugin.go:884] Failed to contact API server when 
waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.617975 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2" exitCode=0 Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.618056 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2"} Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.618540 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.619932 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.619987 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.620009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.620896 4813 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb" exitCode=0 Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.620954 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb"} Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.621012 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.622044 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.622071 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.622092 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.622115 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.623125 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.623171 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.623184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.624895 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926"} Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.625147 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8"} Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.624965 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.625844 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58"} Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.626044 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a"} Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.626736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.626786 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.626812 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.635186 4813 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c" exitCode=0 Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.635345 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c"} Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.635503 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.639842 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.639874 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.639887 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.640921 4813 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="e095b845cf48b87ddbd052291f606128d827fdfce6a535afdff9e00635229f50" exitCode=0 Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.640996 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"e095b845cf48b87ddbd052291f606128d827fdfce6a535afdff9e00635229f50"} Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.641025 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.642953 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.642976 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:56 crc kubenswrapper[4813]: I1007 19:17:56.642987 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.146987 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:57 crc kubenswrapper[4813]: W1007 19:17:57.431842 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:57 crc kubenswrapper[4813]: E1007 19:17:57.431918 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.544151 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:57 crc kubenswrapper[4813]: E1007 19:17:57.563114 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="3.2s" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.644865 4813 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846" exitCode=0 Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.644936 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.644964 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.646429 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.646460 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.646471 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.648222 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.648236 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a38d5db77ffa8a2032673e8d879927c1d10d9bdc91969b1c58b7ca1583fda848"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.652414 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.652439 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.652450 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.656780 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.656812 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.656826 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.656839 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.659552 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.660021 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.660453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.660475 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.660487 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468"} Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.660892 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.660912 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.660924 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.661552 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.661853 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.661874 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.787016 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.788139 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.788170 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.788182 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:57 crc kubenswrapper[4813]: I1007 19:17:57.788206 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 19:17:57 crc kubenswrapper[4813]: E1007 19:17:57.788665 4813 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Oct 07 19:17:58 crc kubenswrapper[4813]: W1007 19:17:58.017544 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:58 crc kubenswrapper[4813]: E1007 19:17:58.017623 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:58 crc kubenswrapper[4813]: W1007 19:17:58.089074 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:58 crc kubenswrapper[4813]: E1007 19:17:58.089158 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:58 crc kubenswrapper[4813]: W1007 19:17:58.138576 4813 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:58 crc kubenswrapper[4813]: E1007 19:17:58.138656 4813 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.460445 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.470170 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.543991 4813 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.665210 4813 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186" exitCode=0 Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.665283 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186"} Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.665432 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.666702 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.666726 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.666734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.667379 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.669468 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="10fd3585683137908193ef9592477296a6ccb8c27557170ea153695d89adf8d4" exitCode=255 Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.669570 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:58 crc kubenswrapper[4813]: 
I1007 19:17:58.669603 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"10fd3585683137908193ef9592477296a6ccb8c27557170ea153695d89adf8d4"} Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.669673 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.669763 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.669782 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.669861 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.669891 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.670957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.670980 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.670989 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671206 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671310 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671431 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671689 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671713 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671757 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.671820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:58 crc kubenswrapper[4813]: I1007 19:17:58.672286 4813 scope.go:117] "RemoveContainer" containerID="10fd3585683137908193ef9592477296a6ccb8c27557170ea153695d89adf8d4" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.263935 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.675857 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.678266 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595"} Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.678418 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.679467 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.679495 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.679503 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.681869 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5"} Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.681912 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684"} Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.681937 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd"} Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.681888 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.681970 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.682955 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.682982 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.682992 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.683186 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.683234 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:17:59 crc kubenswrapper[4813]: I1007 19:17:59.683253 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:00 
crc kubenswrapper[4813]: I1007 19:18:00.148272 4813 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.148416 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.690749 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f"} Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.690817 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5"} Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.690827 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.690883 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.691055 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.691819 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.691860 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.691869 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.692805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.692853 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.692870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.989750 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.991507 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.991568 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.991589 4813 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:00 crc kubenswrapper[4813]: I1007 19:18:00.991633 4813 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 07 19:18:01 crc kubenswrapper[4813]: I1007 19:18:01.693666 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:01 crc kubenswrapper[4813]: I1007 19:18:01.693666 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:01 crc kubenswrapper[4813]: I1007 19:18:01.695358 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:01 crc kubenswrapper[4813]: I1007 19:18:01.695371 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:01 crc kubenswrapper[4813]: I1007 19:18:01.695429 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:01 crc kubenswrapper[4813]: I1007 19:18:01.695442 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:01 crc kubenswrapper[4813]: I1007 19:18:01.695453 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:01 crc kubenswrapper[4813]: I1007 19:18:01.695468 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.302137 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.302420 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.304013 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.304044 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.304060 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.556502 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.699105 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.701159 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.701462 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:03 crc kubenswrapper[4813]: I1007 19:18:03.701702 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:04 crc kubenswrapper[4813]: E1007 19:18:04.683228 4813 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 07 19:18:04 crc kubenswrapper[4813]: I1007 19:18:04.807849 4813 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:18:04 crc kubenswrapper[4813]: I1007 19:18:04.808086 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:04 crc kubenswrapper[4813]: I1007 19:18:04.809633 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:04 crc kubenswrapper[4813]: I1007 19:18:04.809673 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:04 crc kubenswrapper[4813]: I1007 19:18:04.809682 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.167893 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.168077 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.169015 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.169057 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.169066 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.371617 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.708250 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.709455 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.709505 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:05 crc kubenswrapper[4813]: I1007 19:18:05.709528 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:06 crc kubenswrapper[4813]: I1007 19:18:06.177947 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 07 19:18:06 crc kubenswrapper[4813]: I1007 19:18:06.178145 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:06 crc kubenswrapper[4813]: I1007 19:18:06.179246 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:06 crc kubenswrapper[4813]: I1007 19:18:06.179274 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:06 crc kubenswrapper[4813]: I1007 19:18:06.179286 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:09 crc kubenswrapper[4813]: I1007 19:18:09.070097 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver 
namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 07 19:18:09 crc kubenswrapper[4813]: I1007 19:18:09.070164 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 07 19:18:09 crc kubenswrapper[4813]: I1007 19:18:09.076300 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 07 19:18:09 crc kubenswrapper[4813]: I1007 19:18:09.076565 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 07 19:18:09 crc kubenswrapper[4813]: I1007 19:18:09.264551 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 07 19:18:09 crc kubenswrapper[4813]: I1007 19:18:09.264620 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 07 19:18:10 crc kubenswrapper[4813]: I1007 19:18:10.148295 4813 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 07 19:18:10 crc kubenswrapper[4813]: I1007 19:18:10.149011 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 07 19:18:12 crc kubenswrapper[4813]: I1007 19:18:12.701917 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 07 19:18:12 crc kubenswrapper[4813]: I1007 19:18:12.701981 4813 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.559887 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.560037 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.560351 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.560396 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.561044 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.561064 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.561072 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.571722 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.727998 4813 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.728422 4813 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.728750 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.729627 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.729658 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:13 crc kubenswrapper[4813]: I1007 19:18:13.729670 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:14 crc 
kubenswrapper[4813]: E1007 19:18:14.067680 4813 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.071756 4813 trace.go:236] Trace[2109454126]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 19:18:02.856) (total time: 11214ms):
Oct 07 19:18:14 crc kubenswrapper[4813]: Trace[2109454126]: ---"Objects listed" error:<nil> 11214ms (19:18:14.071)
Oct 07 19:18:14 crc kubenswrapper[4813]: Trace[2109454126]: [11.214892177s] [11.214892177s] END
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.072385 4813 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.071876 4813 trace.go:236] Trace[475824603]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 19:18:01.861) (total time: 12210ms):
Oct 07 19:18:14 crc kubenswrapper[4813]: Trace[475824603]: ---"Objects listed" error:<nil> 12210ms (19:18:14.071)
Oct 07 19:18:14 crc kubenswrapper[4813]: Trace[475824603]: [12.210347655s] [12.210347655s] END
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.072623 4813 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.073932 4813 trace.go:236] Trace[212279471]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 19:18:01.492) (total time: 12581ms):
Oct 07 19:18:14 crc kubenswrapper[4813]: Trace[212279471]: ---"Objects listed" error:<nil> 12581ms (19:18:14.073)
Oct 07 19:18:14 crc kubenswrapper[4813]: Trace[212279471]: [12.581904744s] [12.581904744s] END
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.073954 4813 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.076763 4813 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.077739 4813 trace.go:236] Trace[1358961030]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (07-Oct-2025 19:18:02.178) (total time: 11898ms):
Oct 07 19:18:14 crc kubenswrapper[4813]: Trace[1358961030]: ---"Objects listed" error:<nil> 11898ms (19:18:14.077)
Oct 07 19:18:14 crc kubenswrapper[4813]: Trace[1358961030]: [11.89874083s] [11.89874083s] END
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.077934 4813 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.083544 4813 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.083863 4813 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.085430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.085549 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.085634 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.085747 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.085833 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.106972 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.116502 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.116568 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.116587 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.116611 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.116630 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.135894 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.142583 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.142629 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.142641 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.142661 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.142674 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.156542 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.160768 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.160797 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.160806 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.160820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.160830 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.182468 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.189097 4813 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.189313 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.189411 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.189494 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.189562 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.205706 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.206176 4813 
kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.207673 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.207710 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.207720 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.207736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.207746 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.309565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.309598 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.309609 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.309625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.309636 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.411983 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.412019 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.412033 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.412050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.412058 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.514031 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.514082 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.514096 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.514114 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.514126 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.544002 4813 apiserver.go:52] "Watching apiserver"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.546951 4813 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.547279 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-j4kjg","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"]
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.547694 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.547807 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.547816 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.547911 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.548000 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.548103 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.548695 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-j4kjg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.548768 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.548977 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.549020 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.549913 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.551670 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.551691 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.551726 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.551672 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.551807 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.551902 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.551952 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.552144 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.552241 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.552529 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.554612 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.563659 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.581086 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-gcfdf"] Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.581465 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.583661 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.583764 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.583968 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.584071 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-vhdcn"] Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.584111 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.583676 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.584553 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.586261 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.586510 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.586631 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.586750 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.586767 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.587768 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-gbxzg"] Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.588038 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.589222 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.589217 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.589281 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.600491 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.613818 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.616716 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.616745 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.616772 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.616801 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.616811 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.622292 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.630381 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.639073 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.646346 4813 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.646676 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.653226 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.661162 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.668067 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.674610 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680420 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680451 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680467 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: 
\"5b88f790-22fa-440e-b583-365168c0b23d\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680484 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680502 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680519 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680534 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680550 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680566 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680581 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680655 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680692 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680727 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680744 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680750 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680752 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680759 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680815 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680835 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680853 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680869 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680889 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680904 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680918 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680934 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.680937 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681005 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681021 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681026 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb".
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681037 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681057 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681052 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681109 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681093 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681127 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681147 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681164 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681194 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681210 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681226 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681244 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681260 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681275 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681292 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681308 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681315 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681341 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681405 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681439 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681463 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681481 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681486 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681514 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681526 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681531 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681535 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681606 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681669 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681695 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681718 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681741 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: 
\"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681763 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681767 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681772 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681786 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681829 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681855 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681878 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681900 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681922 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod 
\"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681943 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681966 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681988 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682012 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682035 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682056 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682081 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682105 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682129 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682151 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682175 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682198 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681793 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682221 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682247 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682290 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682313 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682355 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682378 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 07 19:18:14 crc kubenswrapper[4813]: 
I1007 19:18:14.682401 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682423 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682444 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682466 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682489 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682514 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682537 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682559 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682580 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682603 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 
19:18:14.682624 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682648 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682673 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682695 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682719 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682741 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682763 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682786 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682809 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681815 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" 
(UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681940 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.681970 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682053 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682070 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682134 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682186 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682208 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682288 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682342 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682345 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682496 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682495 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682498 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682568 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682633 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682728 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682775 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.682838 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:15.182817868 +0000 UTC m=+21.261073479 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689014 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689043 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689156 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689222 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689281 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689314 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689367 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689417 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689433 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689498 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689501 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689639 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689882 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689970 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.690273 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.690504 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682972 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.682980 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683085 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683146 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683144 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683197 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683274 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683305 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683354 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683705 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.683709 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.684022 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.684171 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.684427 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.684488 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.684853 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.684858 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.684921 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.685049 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.685215 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.685230 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.685238 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.685255 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.685842 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.685897 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.686529 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.687476 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.687549 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.687574 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.687592 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.687622 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.688279 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.688472 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.688576 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.688761 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.688829 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.690696 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.691142 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.691263 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.691371 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.689813 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692210 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692275 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692423 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692449 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692519 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692615 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692693 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692840 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.692923 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693054 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693114 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693185 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693246 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693297 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693317 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693382 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693405 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693429 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693494 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693556 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693687 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693795 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 19:18:14 crc 
kubenswrapper[4813]: I1007 19:18:14.693856 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693904 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.693952 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694016 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694086 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694151 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694357 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694377 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694447 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694568 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " 
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694623 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694681 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694736 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694755 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694793 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694813 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.694896 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.695018 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.695038 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.695134 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.695155 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.695200 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.695262 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.695311 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.695667 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.696550 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.696829 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.702878 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.703110 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.703220 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.703602 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.705050 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.705166 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.705748 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.705962 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.706083 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.706254 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.706762 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.706824 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.706862 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.706896 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.706930 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.706965 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707003 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707032 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707064 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: 
\"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707095 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707125 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707155 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707184 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707213 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707242 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707263 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707291 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707351 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707373 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707395 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707423 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707455 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707481 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707503 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707531 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707559 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707592 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707624 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707656 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707690 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707712 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707740 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707767 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707787 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707808 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707831 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707855 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707880 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707906 4813 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707932 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707953 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.707976 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708001 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708028 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708064 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708087 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708112 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708144 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708209 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9cr8\" (UniqueName: \"kubernetes.io/projected/f693a3c5-0ae5-4bf9-9e60-027f67537f9a-kube-api-access-q9cr8\") pod \"node-resolver-j4kjg\" (UID: \"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\") " pod="openshift-dns/node-resolver-j4kjg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708254 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708285 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/537f8a53-dde4-4808-a822-9d8c922a8499-proxy-tls\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708316 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/76e24ee5-81b1-4538-aca5-141e399e32e9-cni-binary-copy\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-k8s-cni-cncf-io\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708379 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nmf4\" (UniqueName: \"kubernetes.io/projected/537f8a53-dde4-4808-a822-9d8c922a8499-kube-api-access-4nmf4\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708413 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9848a8f6-34ef-49f2-8263-067fc9085072-cni-binary-copy\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708446 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708470 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-cni-dir\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708491 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-conf-dir\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708516 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708542 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708565 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-os-release\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708588 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-cnibin\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708618 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-socket-dir-parent\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708640 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-etc-kubernetes\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708661 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/537f8a53-dde4-4808-a822-9d8c922a8499-mcd-auth-proxy-config\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708683 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-cni-multus\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708707 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708731 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708754 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f693a3c5-0ae5-4bf9-9e60-027f67537f9a-hosts-file\") pod \"node-resolver-j4kjg\" (UID: \"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\") " pod="openshift-dns/node-resolver-j4kjg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708783 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-cnibin\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.708812 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709042 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709074 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709096 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-kubelet\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709118 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-hostroot\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709141 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-system-cni-dir\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709246 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709252 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709273 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-os-release\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709305 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709404 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709537 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709673 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709748 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.709921 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710087 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9848a8f6-34ef-49f2-8263-067fc9085072-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710137 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710175 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710211 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7mnh\" (UniqueName: \"kubernetes.io/projected/76e24ee5-81b1-4538-aca5-141e399e32e9-kube-api-access-d7mnh\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710234 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/537f8a53-dde4-4808-a822-9d8c922a8499-rootfs\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710256 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-netns\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710281 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-multus-certs\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710303 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qct6q\" (UniqueName: \"kubernetes.io/projected/9848a8f6-34ef-49f2-8263-067fc9085072-kube-api-access-qct6q\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710350 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-system-cni-dir\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710387 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710419 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710449 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710435 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710484 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-cni-bin\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710539 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710565 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-daemon-config\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710657 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710673 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710687 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710699 4813 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710710 4813 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710720 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710717 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710731 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710760 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710837 4813 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710859 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710873 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710883 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710894 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710905 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710917 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710928 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710938 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710949 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710959 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710969 4813 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710979 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.710991 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711004 4813 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711016 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711026 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711036 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711047 4813 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711058 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711069 4813 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711078 4813 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711089 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711099 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711110 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711120 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711130 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711141 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711152 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711162 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711172 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711181 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711191 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711200 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711210 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711210 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711264 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711600 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711690 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711767 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711925 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.712556 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.712621 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.712787 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.712816 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.712875 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713085 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.711220 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713270 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713820 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713839 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713853 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713864 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713874 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713886 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713896 4813 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713907 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713920 4813 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714498 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714517 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714528 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714540 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714551 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714560 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714570 4813 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714579 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714590 4813 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714602 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714613 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714622 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714633 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714642 4813 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714652 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714661 4813 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714672 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714681 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714690 4813 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714700 4813 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714710 4813 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714719 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714729 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714739 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714752 4813 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714761 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714772 4813 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714781 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714791 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714801 4813 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714811 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714819 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714829 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714838 4813 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714848 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714858 4813 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714866 4813 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714875 4813 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714884 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714893 4813 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714903 4813 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714914 4813 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714923 4813 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714934 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714944 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714953 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713305 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713430 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713442 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713796 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.716395 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.713789 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714141 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714175 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714212 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714247 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.714300 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.715005 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.715050 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.715478 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.715739 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.715856 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.715979 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.716149 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.716166 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.716199 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.716676 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.716534 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.716988 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.717001 4813 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.717016 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.717106 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.717132 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.717182 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.717115 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.717299 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.717564 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.718099 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.718173 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.718192 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:15.218168498 +0000 UTC m=+21.296424219 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.718234 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.718364 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.718430 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:15.218397275 +0000 UTC m=+21.296653026 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.718521 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.718632 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.718664 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719021 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719027 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719167 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719371 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719616 4813 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719677 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719842 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719853 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719900 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.719961 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720001 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720262 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720376 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720494 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720510 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720544 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720656 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720663 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.720833 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.721160 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.721433 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.722009 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.722036 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.722942 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.723269 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.723549 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.723781 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.723812 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.723825 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.724078 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.724247 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.723793 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.723937 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.724469 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.724750 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.724764 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.725536 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.725688 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.725744 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.725771 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.725980 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.725911 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.725908 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.726176 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.726200 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.726210 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.729271 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.729340 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.729351 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.729369 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.729406 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.735175 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.736634 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.736662 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.736676 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.736727 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:15.236710777 +0000 UTC m=+21.314966388 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.736833 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.737725 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.737739 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.737748 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.737785 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:15.237775137 +0000 UTC m=+21.316030748 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.744029 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.745162 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.745447 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.748155 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595" exitCode=255 Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.748196 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595"} Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.748248 4813 scope.go:117] "RemoveContainer" containerID="10fd3585683137908193ef9592477296a6ccb8c27557170ea153695d89adf8d4" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.748382 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.748542 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.751539 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.751899 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.754812 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.761700 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.765214 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.767906 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.771830 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.772101 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.781074 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.790072 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.797718 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.805100 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.814090 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.818226 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/76e24ee5-81b1-4538-aca5-141e399e32e9-cni-binary-copy\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.818259 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-k8s-cni-cncf-io\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.818277 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nmf4\" (UniqueName: \"kubernetes.io/projected/537f8a53-dde4-4808-a822-9d8c922a8499-kube-api-access-4nmf4\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.818293 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-cni-dir\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.818365 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-k8s-cni-cncf-io\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.818522 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-conf-dir\") pod \"multus-gbxzg\" (UID: 
\"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.818753 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-cni-dir\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819052 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/76e24ee5-81b1-4538-aca5-141e399e32e9-cni-binary-copy\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819715 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-conf-dir\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819747 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9848a8f6-34ef-49f2-8263-067fc9085072-cni-binary-copy\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819773 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-os-release\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819788 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-cnibin\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819807 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-socket-dir-parent\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819821 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-etc-kubernetes\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819837 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-cni-multus\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819851 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/537f8a53-dde4-4808-a822-9d8c922a8499-mcd-auth-proxy-config\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819869 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-cnibin\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819894 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f693a3c5-0ae5-4bf9-9e60-027f67537f9a-hosts-file\") pod \"node-resolver-j4kjg\" (UID: \"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\") " pod="openshift-dns/node-resolver-j4kjg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819909 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-kubelet\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819924 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-hostroot\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.819937 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-system-cni-dir\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820164 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-os-release\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820187 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9848a8f6-34ef-49f2-8263-067fc9085072-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820203 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820219 4813 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-d7mnh\" (UniqueName: \"kubernetes.io/projected/76e24ee5-81b1-4538-aca5-141e399e32e9-kube-api-access-d7mnh\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820233 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/537f8a53-dde4-4808-a822-9d8c922a8499-rootfs\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820248 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-system-cni-dir\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820262 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-netns\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820276 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-multus-certs\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820291 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qct6q\" (UniqueName: \"kubernetes.io/projected/9848a8f6-34ef-49f2-8263-067fc9085072-kube-api-access-qct6q\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820305 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820338 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820353 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-cni-bin\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820366 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-daemon-config\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820380 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9cr8\" (UniqueName: \"kubernetes.io/projected/f693a3c5-0ae5-4bf9-9e60-027f67537f9a-kube-api-access-q9cr8\") pod \"node-resolver-j4kjg\" (UID: \"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\") " pod="openshift-dns/node-resolver-j4kjg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820394 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/537f8a53-dde4-4808-a822-9d8c922a8499-proxy-tls\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820432 4813 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820441 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820450 4813 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820458 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820467 4813 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820475 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820483 4813 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820493 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820502 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820512 4813 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820520 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820528 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820536 4813 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820545 4813 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820554 4813 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820562 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820569 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820577 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820580 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-os-release\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820586 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820610 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820619 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 
crc kubenswrapper[4813]: I1007 19:18:14.820629 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820638 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820646 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820654 4813 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820662 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820670 4813 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820678 4813 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820686 4813 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820694 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820703 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820712 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820720 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820729 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc 
kubenswrapper[4813]: I1007 19:18:14.820737 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820745 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820753 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820762 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820771 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820779 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820787 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820795 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820803 4813 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820811 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820820 4813 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820828 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820836 4813 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820844 
4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820852 4813 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820861 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820869 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820877 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820886 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820894 4813 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820902 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820910 4813 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820919 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820927 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820935 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820943 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 
19:18:14.820951 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820959 4813 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820967 4813 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820975 4813 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820982 4813 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820990 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820998 4813 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821006 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821013 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821022 4813 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821030 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821037 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821045 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821052 4813 reconciler_common.go:293] "Volume detached for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821059 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821084 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821093 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821102 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821110 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821118 4813 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821126 4813 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821134 4813 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821141 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821148 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821156 4813 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821165 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821173 4813 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821181 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821189 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821197 4813 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821204 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.820899 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-multus-certs\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821659 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9848a8f6-34ef-49f2-8263-067fc9085072-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821726 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821772 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9848a8f6-34ef-49f2-8263-067fc9085072-cni-binary-copy\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821817 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-os-release\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821841 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-cnibin\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: 
I1007 19:18:14.821878 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-run-netns\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821903 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-system-cni-dir\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821903 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-cnibin\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821947 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/537f8a53-dde4-4808-a822-9d8c922a8499-rootfs\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821974 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-kubelet\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821984 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-cni-multus\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821965 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-etc-kubernetes\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.822008 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-hostroot\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.822032 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-system-cni-dir\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.821948 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-socket-dir-parent\") pod \"multus-gbxzg\" (UID: 
\"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.822118 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/76e24ee5-81b1-4538-aca5-141e399e32e9-host-var-lib-cni-bin\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.822132 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/f693a3c5-0ae5-4bf9-9e60-027f67537f9a-hosts-file\") pod \"node-resolver-j4kjg\" (UID: \"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\") " pod="openshift-dns/node-resolver-j4kjg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.822242 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.822846 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9848a8f6-34ef-49f2-8263-067fc9085072-tuning-conf-dir\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.823561 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.823995 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/537f8a53-dde4-4808-a822-9d8c922a8499-proxy-tls\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.824056 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/76e24ee5-81b1-4538-aca5-141e399e32e9-multus-daemon-config\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.824565 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/537f8a53-dde4-4808-a822-9d8c922a8499-mcd-auth-proxy-config\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.830861 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.830893 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 
19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.830901 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.830916 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.830932 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.832673 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.837028 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7mnh\" (UniqueName: \"kubernetes.io/projected/76e24ee5-81b1-4538-aca5-141e399e32e9-kube-api-access-d7mnh\") pod \"multus-gbxzg\" (UID: \"76e24ee5-81b1-4538-aca5-141e399e32e9\") " pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.837611 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qct6q\" (UniqueName: \"kubernetes.io/projected/9848a8f6-34ef-49f2-8263-067fc9085072-kube-api-access-qct6q\") pod \"multus-additional-cni-plugins-vhdcn\" (UID: \"9848a8f6-34ef-49f2-8263-067fc9085072\") " pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.840668 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9cr8\" (UniqueName: \"kubernetes.io/projected/f693a3c5-0ae5-4bf9-9e60-027f67537f9a-kube-api-access-q9cr8\") pod \"node-resolver-j4kjg\" (UID: \"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\") " pod="openshift-dns/node-resolver-j4kjg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.842300 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.843072 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nmf4\" (UniqueName: \"kubernetes.io/projected/537f8a53-dde4-4808-a822-9d8c922a8499-kube-api-access-4nmf4\") pod \"machine-config-daemon-gcfdf\" (UID: \"537f8a53-dde4-4808-a822-9d8c922a8499\") " pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.851499 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 
07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.861209 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.863585 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.868886 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.877492 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-j4kjg" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.886497 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.894494 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.894798 4813 scope.go:117] "RemoveContainer" containerID="bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595" Oct 07 19:18:14 crc kubenswrapper[4813]: E1007 19:18:14.895041 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.902097 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.909205 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.915052 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-gbxzg" Oct 07 19:18:14 crc kubenswrapper[4813]: W1007 19:18:14.929723 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod537f8a53_dde4_4808_a822_9d8c922a8499.slice/crio-2182a1eb2a0d8f8e85c87c6a557c63a035a8417d0468f78317a92449ce1fe22e WatchSource:0}: Error finding container 2182a1eb2a0d8f8e85c87c6a557c63a035a8417d0468f78317a92449ce1fe22e: Status 404 returned error can't find the container with id 2182a1eb2a0d8f8e85c87c6a557c63a035a8417d0468f78317a92449ce1fe22e Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.933076 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.933121 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.933128 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.933142 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.933150 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:14Z","lastTransitionTime":"2025-10-07T19:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.949393 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vvpdd"] Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.950163 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:14 crc kubenswrapper[4813]: W1007 19:18:14.951100 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9848a8f6_34ef_49f2_8263_067fc9085072.slice/crio-77cb2c5134bb858918ed3eadbe2d6a62ede2497e4d6f2d4ce1a21d0905e164a3 WatchSource:0}: Error finding container 77cb2c5134bb858918ed3eadbe2d6a62ede2497e4d6f2d4ce1a21d0905e164a3: Status 404 returned error can't find the container with id 77cb2c5134bb858918ed3eadbe2d6a62ede2497e4d6f2d4ce1a21d0905e164a3 Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.953056 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.954057 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.954117 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.954125 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.954125 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.954236 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.954466 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.960992 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.969043 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.977182 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.985581 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: I1007 19:18:14.995039 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:14 crc kubenswrapper[4813]: W1007 19:18:14.999188 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod76e24ee5_81b1_4538_aca5_141e399e32e9.slice/crio-38eca9b1941485f2f701789586d5af598793c15b97c232c6a7ba9acca7f223af WatchSource:0}: Error finding container 38eca9b1941485f2f701789586d5af598793c15b97c232c6a7ba9acca7f223af: Status 404 returned error can't find the container with id 38eca9b1941485f2f701789586d5af598793c15b97c232c6a7ba9acca7f223af Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.005270 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.015192 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.023812 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.037510 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.037548 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.037556 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.037577 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.037586 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.039581 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.051402 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10fd3585683137908193ef9592477296a6ccb8c27557170ea153695d89adf8d4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"message\\\":\\\"W1007 19:17:57.871098 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 
19:17:57.871416 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759864677 cert, and key in /tmp/serving-cert-1602495533/serving-signer.crt, /tmp/serving-cert-1602495533/serving-signer.key\\\\nI1007 19:17:58.233003 1 observer_polling.go:159] Starting file observer\\\\nW1007 19:17:58.235366 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 19:17:58.235678 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:17:58.236941 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1602495533/tls.crt::/tmp/serving-cert-1602495533/tls.key\\\\\\\"\\\\nF1007 19:17:58.422529 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.084773 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.095596 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs
\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122720 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122767 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-netd\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122791 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovn-node-metrics-cert\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122812 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-kubelet\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122832 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-log-socket\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122852 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-systemd\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122873 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-node-log\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122892 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-systemd-units\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122909 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-ovn\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122929 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122950 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-config\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122968 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-var-lib-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.122990 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-netns\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.123032 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-env-overrides\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.123055 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-ovn-kubernetes\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.123088 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-slash\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.123118 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-etc-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.123138 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-bin\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.123155 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-script-lib\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.123175 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmnhk\" (UniqueName: \"kubernetes.io/projected/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-kube-api-access-pmnhk\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.142122 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.142147 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.142155 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.142167 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.142175 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225277 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.225417 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:16.225399443 +0000 UTC m=+22.303655054 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225452 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225471 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-config\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225487 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-systemd-units\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225500 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-ovn\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225514 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-var-lib-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225530 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-netns\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225544 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-env-overrides\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225559 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-ovn-kubernetes\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225581 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225590 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-systemd-units\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225605 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-var-lib-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225594 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-slash\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225629 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-netns\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225628 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225649 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225669 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-etc-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225685 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-bin\") pod \"ovnkube-node-vvpdd\" (UID: 
\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225699 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-script-lib\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225715 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmnhk\" (UniqueName: \"kubernetes.io/projected/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-kube-api-access-pmnhk\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225723 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-ovn-kubernetes\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225747 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225763 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-netd\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225785 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovn-node-metrics-cert\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225802 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-kubelet\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225817 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-log-socket\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225833 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-systemd\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225845 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-node-log\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225877 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-node-log\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225896 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225914 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-netd\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225933 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-ovn\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.225613 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-slash\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.226001 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.226040 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:16.22603075 +0000 UTC m=+22.304286361 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.226224 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-etc-openvswitch\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.226250 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-bin\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.226361 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-env-overrides\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.226398 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.226420 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:16.22641342 +0000 UTC m=+22.304669031 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.226441 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-log-socket\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.226462 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-kubelet\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.226480 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-systemd\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.226721 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-script-lib\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.227277 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-config\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.233602 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovn-node-metrics-cert\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.243746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.243780 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.243789 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.243810 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.243820 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.248930 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmnhk\" (UniqueName: \"kubernetes.io/projected/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-kube-api-access-pmnhk\") pod \"ovnkube-node-vvpdd\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.273810 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:15 crc kubenswrapper[4813]: W1007 19:18:15.284799 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3bc364e_ed17_44b8_9942_b41e6b8ac13a.slice/crio-6c2f49e8f4d1607718890acde42df8569e1a17c6b27b0d95392b1eda28663d48 WatchSource:0}: Error finding container 6c2f49e8f4d1607718890acde42df8569e1a17c6b27b0d95392b1eda28663d48: Status 404 returned error can't find the container with id 6c2f49e8f4d1607718890acde42df8569e1a17c6b27b0d95392b1eda28663d48 Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.326482 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.326654 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.326606 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.326709 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.326757 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.326811 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:16.326797706 +0000 UTC m=+22.405053317 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.326901 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.326924 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.326937 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.326992 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:16.326975081 +0000 UTC m=+22.405230742 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.345397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.345425 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.345432 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.345445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.345454 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.447979 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.448018 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.448026 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.448041 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.448051 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.550220 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.550250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.550259 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.550272 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.550281 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.652703 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.652738 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.652746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.652759 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.652769 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.751488 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-j4kjg" event={"ID":"f693a3c5-0ae5-4bf9-9e60-027f67537f9a","Type":"ContainerStarted","Data":"5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.751553 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-j4kjg" event={"ID":"f693a3c5-0ae5-4bf9-9e60-027f67537f9a","Type":"ContainerStarted","Data":"606148c0d46af7107837de4ee5461e1086d0b290c28f405f8526670986c24003"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.753396 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.754022 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"3f993fa712b0030de7ef112e7d99998e91da75198981fb6a0ddb5d77686c15e2"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.754224 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.754255 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.754265 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.754278 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.754290 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.754537 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbxzg" event={"ID":"76e24ee5-81b1-4538-aca5-141e399e32e9","Type":"ContainerStarted","Data":"6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.754555 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbxzg" event={"ID":"76e24ee5-81b1-4538-aca5-141e399e32e9","Type":"ContainerStarted","Data":"38eca9b1941485f2f701789586d5af598793c15b97c232c6a7ba9acca7f223af"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.755599 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41" exitCode=0 Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.755626 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.755661 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"6c2f49e8f4d1607718890acde42df8569e1a17c6b27b0d95392b1eda28663d48"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.757003 4813 generic.go:334] "Generic (PLEG): container finished" podID="9848a8f6-34ef-49f2-8263-067fc9085072" containerID="acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982" exitCode=0 Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.757084 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerDied","Data":"acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.757124 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerStarted","Data":"77cb2c5134bb858918ed3eadbe2d6a62ede2497e4d6f2d4ce1a21d0905e164a3"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.763526 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.763563 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.763574 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"2182a1eb2a0d8f8e85c87c6a557c63a035a8417d0468f78317a92449ce1fe22e"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.764718 4813 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"502acc00e2d8f2752f658ff8cfc6de44f8ffce3e34a1395d1b0fc7a3fa802132"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.766146 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.771930 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.777212 4813 scope.go:117] "RemoveContainer" containerID="bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595" Oct 07 19:18:15 crc kubenswrapper[4813]: E1007 19:18:15.777366 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.778066 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.778091 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.778101 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a26fb741416fb52ccce6da376f6694864b473b6102025b14214677ef5882cf2f"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.782865 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.790252 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.798393 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.816497 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller 
ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin
\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.829235 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10fd3585683137908193ef9592477296a6ccb8c27557170ea153695d89adf8d4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"message\\\":\\\"W1007 19:17:57.871098 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1007 19:17:57.871416 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759864677 cert, and key in /tmp/serving-cert-1602495533/serving-signer.crt, /tmp/serving-cert-1602495533/serving-signer.key\\\\nI1007 19:17:58.233003 1 observer_polling.go:159] Starting file observer\\\\nW1007 19:17:58.235366 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1007 19:17:58.235678 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:17:58.236941 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1602495533/tls.crt::/tmp/serving-cert-1602495533/tls.key\\\\\\\"\\\\nF1007 19:17:58.422529 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.843173 4813 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"
mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{
\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.855089 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.861800 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.868169 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.873648 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.877187 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.877225 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.877234 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.877251 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.877260 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.882074 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.895947 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.925424 
4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:
17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.934470 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.941921 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b1
54edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.950874 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.958205 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-confi
g-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.970598 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.979830 4813 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.980514 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.980552 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.980564 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.980580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.980595 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:15Z","lastTransitionTime":"2025-10-07T19:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.987076 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.993970 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:15 crc kubenswrapper[4813]: I1007 19:18:15.998712 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.004471 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.083575 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.083614 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.083634 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.083648 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.083657 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.186270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.186306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.186314 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.186346 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.186356 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.234753 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.234857 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.234877 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:18.234860305 +0000 UTC m=+24.313115916 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.234899 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.234920 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.234951 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:18.234944027 +0000 UTC m=+24.313199638 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.235004 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.235034 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:18.235028449 +0000 UTC m=+24.313284060 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.288109 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.288143 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.288152 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.288168 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.288178 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.305380 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.322131 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.324100 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.326873 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z 
is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.335681 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.335717 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.335832 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.335854 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.335865 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.335911 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.335949 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:18.335891218 +0000 UTC m=+24.414146819 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.335953 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.335970 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.336031 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:18.336011301 +0000 UTC m=+24.414266922 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.339861 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.356068 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.369358 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.383508 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.391019 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.391083 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.391099 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.391125 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.391142 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.395724 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.409377 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/hos
t/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\
\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.420864 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.437990 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.460426 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.469486 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.481073 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.493374 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.493406 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.493414 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.493430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.493440 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.513867 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.556314 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.591976 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.595642 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.595665 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.595672 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.595685 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.595693 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.605563 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.605800 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.605909 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.606076 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.606225 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.606315 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.609145 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.611897 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.614298 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.616117 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.619676 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.620728 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.621737 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.623348 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.624439 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.625801 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.626497 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.627729 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.628187 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.629184 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.629901 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.630621 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.631614 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.632023 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.633171 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.633939 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.634551 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.635849 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.636464 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.636610 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.640204 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.640841 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.642173 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.643765 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.644414 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.645346 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.645862 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.646357 4813 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.646459 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.649029 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.649659 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.650570 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.651980 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.652684 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.653608 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.654301 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.655713 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.656180 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.657495 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.658165 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.659083 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.659566 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.660689 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.661231 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.662394 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.662879 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.663749 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.664194 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.665116 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.665771 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.666213 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.680690 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z 
is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.697211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.697250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.697260 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.697277 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.697288 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.717045 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.755619 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.784383 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerStarted","Data":"0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.791398 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.791448 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.791461 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.791473 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.800142 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.802762 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.803532 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.803553 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.803589 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.803603 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: E1007 19:18:16.815070 4813 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.852869 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.894043 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.905693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.905732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.905741 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.905756 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.905767 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:16Z","lastTransitionTime":"2025-10-07T19:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.942632 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:16 crc kubenswrapper[4813]: I1007 19:18:16.974869 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:16Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc 
kubenswrapper[4813]: I1007 19:18:17.007128 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.007155 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.007164 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.007176 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.007184 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.017492 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",
\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.054583 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.093202 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.109395 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.109428 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.109437 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.109450 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.109459 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.135904 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.150610 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.156459 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.172285 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.191592 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.211574 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.211609 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.211618 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.211651 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.211660 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.240230 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f69101
2955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.273674 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.313514 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.313557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.313570 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.313588 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.313599 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.349464 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-releas
e\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.377069 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.404654 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.415903 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.416140 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.416216 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.416295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.416409 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.432926 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.476730 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.512604 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.519984 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.520224 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.520302 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.520430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.520520 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.554109 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.596581 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.622646 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.622855 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.622935 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.622998 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.623072 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.637111 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.675971 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.712929 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.725381 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.725590 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.725674 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.725741 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.725794 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.760285 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z 
is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.799435 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.799836 4813 generic.go:334] "Generic (PLEG): container finished" podID="9848a8f6-34ef-49f2-8263-067fc9085072" containerID="0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804" exitCode=0 Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.799958 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerDied","Data":"0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.805615 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.815085 4813 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.815194 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.827786 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.827809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.827818 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.827830 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.827838 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.838906 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.874645 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.918090 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab7
31c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.931153 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.931214 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.931224 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.931239 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.931248 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:17Z","lastTransitionTime":"2025-10-07T19:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.953524 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:17 crc kubenswrapper[4813]: I1007 19:18:17.994732 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:17Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.032853 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.032891 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.032901 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.032915 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.032923 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.035626 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.074714 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.123228 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.134588 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.134819 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.134899 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.135005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.135079 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.153215 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.193881 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.237153 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.237471 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.237512 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.237521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.237536 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.237545 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.253060 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.253176 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.253203 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:22.253185299 +0000 UTC m=+28.331440910 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.253226 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.253277 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.253366 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.253377 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:22.253348203 +0000 UTC m=+28.331603814 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.253405 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:22.253397915 +0000 UTC m=+28.331653526 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.278037 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.317506 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.339679 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.339719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.339730 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.339749 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.339760 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.354385 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.354637 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.354677 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.354804 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.354812 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.354830 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.354840 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.354846 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.354856 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.354893 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:22.35487844 +0000 UTC m=+28.433134051 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.354913 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:22.354905261 +0000 UTC m=+28.433160872 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.400773 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z 
is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.436181 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.441947 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.441977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.441989 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.442008 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.442020 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.479715 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.517167 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.544336 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.544375 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.544386 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.544407 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.544419 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.565244 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/
lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"conta
inerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.597942 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.602119 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.602188 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.602234 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.602341 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.602496 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:18 crc kubenswrapper[4813]: E1007 19:18:18.602729 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.635848 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.647209 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.647259 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.647270 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.647288 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.647310 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.677934 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.751313 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.751484 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.751510 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.751581 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.751605 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.820354 4813 generic.go:334] "Generic (PLEG): container finished" podID="9848a8f6-34ef-49f2-8263-067fc9085072" containerID="fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f" exitCode=0 Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.820619 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerDied","Data":"fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.847525 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.853866 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.853895 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.853903 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.853915 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.853925 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.868865 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.881787 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.900655 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.915016 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.934021 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.955013 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.956402 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.956464 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.956481 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.956507 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.956524 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:18Z","lastTransitionTime":"2025-10-07T19:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:18 crc kubenswrapper[4813]: I1007 19:18:18.993280 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:18Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.043044 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.060223 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.060264 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.060278 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.060302 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.060317 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.078351 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.117034 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.157652 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.163309 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.163392 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.163405 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.163425 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.163440 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.202510 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.256079 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.265115 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.265175 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.265189 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.265226 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.265242 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.367493 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.367548 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.367567 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.367597 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.367615 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.470698 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.470765 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.470777 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.470804 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.470818 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.573522 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.573561 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.573572 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.573594 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.573611 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.676086 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.676123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.676134 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.676150 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.676163 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.718784 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-w6x2v"] Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.719137 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.721947 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.722124 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.724578 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.724739 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.751359 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.767342 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.778192 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.778215 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.778223 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.778236 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.778245 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.785459 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.796953 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.824254 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.826949 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.829494 4813 generic.go:334] "Generic (PLEG): container finished" podID="9848a8f6-34ef-49f2-8263-067fc9085072" containerID="c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b" exitCode=0 Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.829519 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerDied","Data":"c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.851726 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.870254 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.870504 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25glw\" (UniqueName: \"kubernetes.io/projected/3ea96b97-cd23-4821-9abd-abde3204afbb-kube-api-access-25glw\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.870541 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3ea96b97-cd23-4821-9abd-abde3204afbb-serviceca\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.870562 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3ea96b97-cd23-4821-9abd-abde3204afbb-host\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.880791 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.880817 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.880824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.880840 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.880851 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.883056 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.898342 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.913097 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.928909 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.952990 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.967138 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.971842 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25glw\" (UniqueName: \"kubernetes.io/projected/3ea96b97-cd23-4821-9abd-abde3204afbb-kube-api-access-25glw\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.971871 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3ea96b97-cd23-4821-9abd-abde3204afbb-host\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.971885 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3ea96b97-cd23-4821-9abd-abde3204afbb-serviceca\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.972280 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3ea96b97-cd23-4821-9abd-abde3204afbb-host\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.973929 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3ea96b97-cd23-4821-9abd-abde3204afbb-serviceca\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.978685 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:19Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.982699 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.982758 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.982771 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.982787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.982797 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:19Z","lastTransitionTime":"2025-10-07T19:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:19 crc kubenswrapper[4813]: I1007 19:18:19.990148 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25glw\" (UniqueName: \"kubernetes.io/projected/3ea96b97-cd23-4821-9abd-abde3204afbb-kube-api-access-25glw\") pod \"node-ca-w6x2v\" (UID: \"3ea96b97-cd23-4821-9abd-abde3204afbb\") " pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.004634 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z 
is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.034997 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-w6x2v" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.038805 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"r
eady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.055307 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.070008 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8
s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.085761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.085784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.085794 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.085808 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.085816 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.091204 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.138549 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.177358 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.188526 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.188554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.188562 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.188577 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.188585 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.215539 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.253960 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.291045 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.291073 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.291080 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.291093 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.291135 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.294736 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.343616 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.381292 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.396206 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.396272 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.396287 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.396309 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 
07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.396344 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.416932 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.458017 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.496778 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.498498 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.498583 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.498641 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.498723 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.498779 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.535125 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.601557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.601871 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.602043 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.601782 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.601639 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.601732 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.602206 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:20 crc kubenswrapper[4813]: E1007 19:18:20.603492 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.603501 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: E1007 19:18:20.603458 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:20 crc kubenswrapper[4813]: E1007 19:18:20.602391 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.706956 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.707020 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.707039 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.707063 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.707080 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.810516 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.810598 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.810622 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.810654 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.810676 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.839694 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-w6x2v" event={"ID":"3ea96b97-cd23-4821-9abd-abde3204afbb","Type":"ContainerStarted","Data":"89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.839793 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-w6x2v" event={"ID":"3ea96b97-cd23-4821-9abd-abde3204afbb","Type":"ContainerStarted","Data":"de61808da0f10b5a33b2fe9d9ebedf497a239808c1f37d2198b1b9f2b7ae0bf2"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.849617 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerStarted","Data":"73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.880547 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{
\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35c
be41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.904654 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.913828 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.913878 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.913896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.913922 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.913938 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:20Z","lastTransitionTime":"2025-10-07T19:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.926385 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.944967 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.967395 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:20 crc kubenswrapper[4813]: I1007 19:18:20.986817 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:20Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.008381 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.017668 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.017704 4813 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.017717 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.017734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.017747 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.022390 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 
2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.035729 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.055611 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.070234 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.084894 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.102101 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.117161 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.120138 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.120184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.120201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.120225 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.120243 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.149889 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f69101
2955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.177688 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.215696 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.222338 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.222400 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.222411 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.222436 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.222449 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.256078 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.294979 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.325750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.326090 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.326219 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.326380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.326525 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.339293 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.389290 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.422789 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.429415 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.429484 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.429521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.429564 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.429586 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.460945 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.499196 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.533297 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.533722 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.533743 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.533767 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.533787 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.540101 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.588749 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.636665 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.636877 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.636974 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.637354 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.637564 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.637249 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.671192 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.696192 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"na
me\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.735388 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.740199 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.740296 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.740385 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.740459 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.740532 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.843122 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.845042 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.845106 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.845135 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.845147 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.855761 4813 generic.go:334] "Generic (PLEG): container finished" podID="9848a8f6-34ef-49f2-8263-067fc9085072" containerID="73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19" exitCode=0 Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.855837 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerDied","Data":"73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.861514 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.861882 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.862403 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.872159 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.887044 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.895024 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.895676 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.900458 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.911944 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.939763 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.947381 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.947426 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.947441 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.947462 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.947480 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:21Z","lastTransitionTime":"2025-10-07T19:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:21 crc kubenswrapper[4813]: I1007 19:18:21.977535 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:21Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.016384 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.050757 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.050790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.050799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.050814 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.050824 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.055920 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.092353 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.149055 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.153461 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.153536 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.153554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.153577 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.153647 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.176804 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.214450 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.255279 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.255809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.255868 4813 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.255885 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.255906 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.255923 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.295488 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.295616 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:30.295590147 +0000 UTC m=+36.373845798 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.295588 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.295667 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.295775 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.295875 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.295933 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:30.295920296 +0000 UTC m=+36.374175937 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.295961 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.296046 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:30.296019318 +0000 UTC m=+36.374274969 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.340316 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.358865 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.358922 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.358944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.358968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.358985 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.382809 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.397131 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.397197 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.397434 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.397505 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.397525 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.397536 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not 
registered Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.397569 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.397589 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.397604 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:30.397580816 +0000 UTC m=+36.475836467 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.397658 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:30.397635908 +0000 UTC m=+36.475891559 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.425772 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.462373 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.462693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.462707 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.462726 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.462739 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.464959 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.495412 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.539279 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.565404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.565470 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.565495 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.565527 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.565549 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.582831 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.602175 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.602389 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.602369 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.602684 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.602870 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.603055 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.621231 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.666746 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.668184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.668236 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.668254 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.668277 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.668293 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.696043 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.702218 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:18:22 crc 
kubenswrapper[4813]: I1007 19:18:22.703095 4813 scope.go:117] "RemoveContainer" containerID="bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595" Oct 07 19:18:22 crc kubenswrapper[4813]: E1007 19:18:22.703379 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.736431 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.771612 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.771824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.771887 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.771977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.772216 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.775131 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.830612 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.859706 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.869051 4813 generic.go:334] "Generic (PLEG): container finished" podID="9848a8f6-34ef-49f2-8263-067fc9085072" containerID="cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78" exitCode=0 Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.869138 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerDied","Data":"cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.869249 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.874406 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.874464 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.874488 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.874518 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.874540 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.907199 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\
\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.941911 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.974262 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:22Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.977688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.977721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.977732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.977746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:22 crc kubenswrapper[4813]: I1007 19:18:22.977756 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:22Z","lastTransitionTime":"2025-10-07T19:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.015963 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.053985 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.080380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.080412 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.080420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.080434 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.080443 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.095588 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.141799 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.179212 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.182567 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.182603 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.182618 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.182640 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.182657 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.228906 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.256973 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.285713 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.285770 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.285792 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.285820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.285842 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.294235 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.337890 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.388183 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.388220 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.388230 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.388245 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.388257 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.390172 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.418470 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.464448 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.491090 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.491138 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.491154 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.491176 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.491192 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.497209 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.540408 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256
:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.594145 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.594212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.594235 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.594265 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.594286 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.697506 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.697573 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.697594 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.697625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.697646 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.800393 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.800462 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.800480 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.800501 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.800520 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.879423 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" event={"ID":"9848a8f6-34ef-49f2-8263-067fc9085072","Type":"ContainerStarted","Data":"8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.879548 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.899921 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clu
ster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.902831 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.902878 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.902895 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.902917 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.902935 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:23Z","lastTransitionTime":"2025-10-07T19:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.920526 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.945469 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.963198 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:23 crc kubenswrapper[4813]: I1007 19:18:23.983267 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:23Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.004919 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.006228 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.006287 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.006304 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.006351 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.006370 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.028306 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.048248 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.063624 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.099778 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.109554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.109819 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.109908 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc 
kubenswrapper[4813]: I1007 19:18:24.109991 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.110079 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.119629 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has 
all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.155130 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.169474 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.188141 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.212547 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.212582 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.212593 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc 
kubenswrapper[4813]: I1007 19:18:24.212610 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.212624 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.221356 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab7
31c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.314642 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.314677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.314688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.314702 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.314712 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.417032 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.417269 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.417404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.417506 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.417591 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.463883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.464141 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.464242 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.464348 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.464468 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.480762 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 
2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.484733 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.484833 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.484902 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.484964 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.485019 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.499530 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 
2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.502555 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.502591 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.502600 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.502615 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.502624 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.516924 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 
2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.520388 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.520455 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.520492 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.520511 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.520524 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.535371 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 
2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.538193 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.538222 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.538233 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.538250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.538260 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.549317 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 
2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.549484 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.550844 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.550869 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.550880 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.550896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.550909 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.602492 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.602681 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.602959 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.603132 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.603244 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:24 crc kubenswrapper[4813]: E1007 19:18:24.603364 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.625958 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.639116 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.649815 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.653845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.653950 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.654009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.654070 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.654133 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.665775 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.679383 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.706407 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.732632 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.754167 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.761163 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.761205 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.761216 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.761233 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.761245 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.771469 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.790598 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.813681 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.826431 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.845996 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\
\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.854979 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.863828 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.863851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.863859 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.863870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.863879 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.868440 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.882713 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/0.log" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.885380 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771" exitCode=1 Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.885420 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.886030 4813 scope.go:117] "RemoveContainer" containerID="e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.901086 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.916382 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.930840 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.946694 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.957209 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.966168 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.966211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.966229 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.966252 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.966270 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:24Z","lastTransitionTime":"2025-10-07T19:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:24 crc kubenswrapper[4813]: I1007 19:18:24.978142 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:24Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.020727 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921
b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"r *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1007 19:18:24.828156 6001 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828184 6001 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828229 6001 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.829169 6001 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:18:24.829208 6001 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 19:18:24.829219 6001 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 19:18:24.829277 6001 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:18:24.829284 6001 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:18:24.829307 6001 factory.go:656] Stopping watch factory\\\\nI1007 19:18:24.829341 6001 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.061577 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.068721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.068764 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.068780 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.068803 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.068865 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.102265 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.139988 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.174488 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.174553 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.174572 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.174598 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.174621 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.183628 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.231206 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.261690 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.278205 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.278246 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:25 crc 
kubenswrapper[4813]: I1007 19:18:25.278262 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.278284 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.278301 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.304777 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.335626 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.385738 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.385799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.385816 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.385838 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.385859 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.489353 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.489403 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.489420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.489440 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.489454 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.591881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.591917 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.591926 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.591940 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.591948 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.694894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.694934 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.694947 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.694966 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.694981 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.797580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.797619 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.797630 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.797647 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.797657 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.891029 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/0.log"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.894054 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12"}
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.894227 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.899220 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.899251 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.899261 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.899275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.899286 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:25Z","lastTransitionTime":"2025-10-07T19:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.910186 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.922539 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.936411 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.945400 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.954898 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.970987 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"r *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1007 19:18:24.828156 6001 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828184 6001 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828229 6001 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.829169 6001 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:18:24.829208 6001 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 19:18:24.829219 6001 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 19:18:24.829277 6001 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:18:24.829284 6001 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:18:24.829307 6001 factory.go:656] Stopping watch factory\\\\nI1007 19:18:24.829341 6001 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:25 crc kubenswrapper[4813]: I1007 19:18:25.982723 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.000965 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.001819 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.001849 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.001857 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.001870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.001879 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.011975 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.027377 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.056195 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.075478 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.087561 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\
\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.099018 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.103947 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.104005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.104024 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.104048 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.104065 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.113730 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.206853 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.207143 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.207282 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.207440 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.207641 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.309854 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.310308 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.310549 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.310941 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.311286 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.414976 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.415361 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.415561 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.415704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.415837 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.519062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.519533 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.519706 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.519862 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.519995 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.602574 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.602612 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:26 crc kubenswrapper[4813]: E1007 19:18:26.602715 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.602580 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:26 crc kubenswrapper[4813]: E1007 19:18:26.602850 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:26 crc kubenswrapper[4813]: E1007 19:18:26.602979 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.622746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.622782 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.622793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.622809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.622821 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.725484 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.726204 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.726403 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.726584 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.726787 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.829470 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.829825 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.829967 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.830094 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.830213 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.906852 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/1.log" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.908363 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/0.log" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.913362 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12" exitCode=1 Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.913388 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.913749 4813 scope.go:117] "RemoveContainer" containerID="e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.915275 4813 scope.go:117] "RemoveContainer" containerID="7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12" Oct 07 19:18:26 crc kubenswrapper[4813]: E1007 19:18:26.915584 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.932998 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.933049 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.933065 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.933089 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.933106 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:26Z","lastTransitionTime":"2025-10-07T19:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.936975 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.959022 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.973824 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:26 crc kubenswrapper[4813]: I1007 19:18:26.990998 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:26Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.011811 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.037536 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.037600 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.037623 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.037652 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.037676 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.038031 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.058684 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.076282 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.107146 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"r *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1007 19:18:24.828156 6001 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828184 6001 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828229 6001 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.829169 6001 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:18:24.829208 6001 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 19:18:24.829219 6001 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 19:18:24.829277 6001 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:18:24.829284 6001 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:18:24.829307 6001 factory.go:656] Stopping watch factory\\\\nI1007 19:18:24.829341 6001 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} 
name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4
612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.133379 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers 
with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.140609 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.140680 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.140704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.140732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.140755 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.157108 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 
2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.177598 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.194590 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.208836 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn"] Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.210199 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.212770 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.213121 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.267727 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.267969 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gdv6\" (UniqueName: \"kubernetes.io/projected/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-kube-api-access-2gdv6\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.268056 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.268088 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.271642 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.272438 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.272476 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.272492 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.272514 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.272530 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.285956 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.302058 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.315086 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.329238 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.349648 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.362833 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.368855 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gdv6\" (UniqueName: \"kubernetes.io/projected/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-kube-api-access-2gdv6\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.368952 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.369005 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.369038 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.370236 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.370305 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.377663 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.377692 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.377703 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.377719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.377730 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.378541 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.379957 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.395933 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gdv6\" (UniqueName: \"kubernetes.io/projected/8d5f7d58-e213-4c3e-a36a-c7603a593bbe-kube-api-access-2gdv6\") pod \"ovnkube-control-plane-749d76644c-fpxxn\" (UID: \"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.403032 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.421433 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.441393 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.459998 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.479658 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.480381 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.480449 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.480472 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.480506 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.480527 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.509113 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"r *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1007 19:18:24.828156 6001 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828184 6001 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828229 6001 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.829169 6001 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:18:24.829208 6001 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 19:18:24.829219 6001 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 19:18:24.829277 6001 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:18:24.829284 6001 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:18:24.829307 6001 factory.go:656] Stopping watch factory\\\\nI1007 19:18:24.829341 6001 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 
2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47
ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.534858 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee
5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.553450 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.574388 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.576580 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.583851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.583898 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.583916 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.583940 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.583956 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.596458 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:27Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.686650 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.686686 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.686694 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.686708 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.686716 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.788404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.788435 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.788447 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.788461 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.788470 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.891382 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.891431 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.891447 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.891466 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.891481 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.919099 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" event={"ID":"8d5f7d58-e213-4c3e-a36a-c7603a593bbe","Type":"ContainerStarted","Data":"2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593"}
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.919156 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" event={"ID":"8d5f7d58-e213-4c3e-a36a-c7603a593bbe","Type":"ContainerStarted","Data":"b8633e32935761fead0aa81691be3532e49f0e215508a45ccf8b5478f2601a22"}
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.921639 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/1.log"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.993953 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.993984 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.993993 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.994005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:27 crc kubenswrapper[4813]: I1007 19:18:27.994014 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:27Z","lastTransitionTime":"2025-10-07T19:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.095991 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.096238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.096247 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.096262 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.096270 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.198916 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.198951 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.198965 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.198985 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.198998 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.302548 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.302591 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.302601 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.302618 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.302630 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.405630 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.405688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.405705 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.405731 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.405749 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.508684 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.508742 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.508762 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.508793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.508815 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.602209 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.602409 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:18:28 crc kubenswrapper[4813]: E1007 19:18:28.602494 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.602554 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:18:28 crc kubenswrapper[4813]: E1007 19:18:28.602734 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 19:18:28 crc kubenswrapper[4813]: E1007 19:18:28.602855 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.610951 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.611018 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.611036 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.611057 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.611074 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.714359 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.714424 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.714441 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.714467 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.714485 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.741877 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-nz8v5"]
Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.742838 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:18:28 crc kubenswrapper[4813]: E1007 19:18:28.742961 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.759820 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.784629 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.784959 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwtb2\" (UniqueName: \"kubernetes.io/projected/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-kube-api-access-cwtb2\") pod 
\"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.793230 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"run
ning\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f
816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.825796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.825864 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.825881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.825908 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.825929 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.828083 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.851525 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.870076 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is 
after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.886399 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.886502 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwtb2\" (UniqueName: \"kubernetes.io/projected/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-kube-api-access-cwtb2\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:28 crc kubenswrapper[4813]: E1007 19:18:28.886948 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:18:28 crc kubenswrapper[4813]: E1007 19:18:28.887184 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs podName:c8c05824-c5ea-44b7-bd35-0c7d6561a61b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:29.387148301 +0000 UTC m=+35.465403942 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs") pod "network-metrics-daemon-nz8v5" (UID: "c8c05824-c5ea-44b7-bd35-0c7d6561a61b") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.896269 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.915909 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.925886 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwtb2\" (UniqueName: \"kubernetes.io/projected/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-kube-api-access-cwtb2\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.929976 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.930036 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.930061 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.930122 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.930148 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:28Z","lastTransitionTime":"2025-10-07T19:18:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.934309 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" event={"ID":"8d5f7d58-e213-4c3e-a36a-c7603a593bbe","Type":"ContainerStarted","Data":"704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153"} Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.939558 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.959161 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:28 crc kubenswrapper[4813]: I1007 19:18:28.977841 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.000714 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:28Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.023164 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.033509 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.033557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.033576 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.033599 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.033615 4813 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.043714 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.16
8.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.076387 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name
\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"r *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1007 19:18:24.828156 6001 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828184 6001 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828229 6001 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.829169 6001 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:18:24.829208 6001 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 19:18:24.829219 6001 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 19:18:24.829277 6001 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:18:24.829284 6001 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:18:24.829307 
6001 factory.go:656] Stopping watch factory\\\\nI1007 19:18:24.829341 6001 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 
2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47
ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.097235 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.115184 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.135799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.135833 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.135845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.135862 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.135874 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.138243 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.168798 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.185088 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.200172 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\
\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.212920 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.226568 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.238209 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.238304 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.238344 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.238372 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.238402 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.240981 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.253996 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc 
kubenswrapper[4813]: I1007 19:18:29.272961 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.285869 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.298961 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.314675 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.329541 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.341632 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.341676 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.341688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.341708 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.341723 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.346361 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.365076 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.379836 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.391574 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:29 crc kubenswrapper[4813]: E1007 19:18:29.391827 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:18:29 crc kubenswrapper[4813]: E1007 19:18:29.391949 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs podName:c8c05824-c5ea-44b7-bd35-0c7d6561a61b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:30.391917687 +0000 UTC m=+36.470173338 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs") pod "network-metrics-daemon-nz8v5" (UID: "c8c05824-c5ea-44b7-bd35-0c7d6561a61b") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.392898 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19
:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.414072 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-
lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7b5ae962061884e1a94867677a43371f264b37799246bb939e947f33d29c771\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:24Z\\\",\\\"message\\\":\\\"r *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1007 19:18:24.828156 6001 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828184 6001 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.828229 6001 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:24.829169 6001 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:18:24.829208 6001 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1007 19:18:24.829219 6001 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1007 19:18:24.829277 6001 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:18:24.829284 6001 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:18:24.829307 6001 factory.go:656] Stopping watch factory\\\\nI1007 19:18:24.829341 6001 handler.go:208] Removed 
*v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 
2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47
ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:29Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.445083 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.445358 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.445450 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.445543 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.445615 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.547832 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.547900 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.547922 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.547951 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.547969 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.650256 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.650314 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.650356 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.650381 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.650398 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.754374 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.754448 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.754469 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.754499 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.754528 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.858815 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.859467 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.859492 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.859520 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.859539 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.962304 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.962411 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.962430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.962456 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:29 crc kubenswrapper[4813]: I1007 19:18:29.962475 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:29Z","lastTransitionTime":"2025-10-07T19:18:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.065278 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.065396 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.065416 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.065470 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.065490 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.168920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.168992 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.169012 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.169038 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.169057 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.272466 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.272540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.272557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.272583 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.272601 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.302140 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.302384 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.302429 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.302642 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.302716 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:46.302694559 +0000 UTC m=+52.380950210 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.302953 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.303168 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:46.303139941 +0000 UTC m=+52.381395582 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.303454 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-07 19:18:46.3034361 +0000 UTC m=+52.381691751 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.375916 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.376001 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.376021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.376079 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.376100 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.403642 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.403865 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.404231 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs podName:c8c05824-c5ea-44b7-bd35-0c7d6561a61b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:32.404198056 +0000 UTC m=+38.482453767 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs") pod "network-metrics-daemon-nz8v5" (UID: "c8c05824-c5ea-44b7-bd35-0c7d6561a61b") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.404103 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.405020 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.405093 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.405115 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.405111 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.405193 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:46.405167542 +0000 UTC m=+52.483423193 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.405791 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.405888 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.405952 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.406079 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 19:18:46.406051777 +0000 UTC m=+52.484307418 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.479683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.480082 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.480246 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.480442 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.480649 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.543767 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.545026 4813 scope.go:117] "RemoveContainer" containerID="7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.545379 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.576171 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":
\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.583271 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.583297 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.583307 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.583336 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.583349 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.601977 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.602026 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.602077 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.602083 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.602203 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.602702 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.602773 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:30 crc kubenswrapper[4813]: E1007 19:18:30.602887 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.608212 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mount
Path\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.628583 
4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:
14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.645943 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.664726 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.681822 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9
eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.685903 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.685955 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.685972 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.685996 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.686013 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.701312 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.720481 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.741910 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.758037 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.776620 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.788476 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.788529 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.788548 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.788574 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.788593 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.794754 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.815003 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.833275 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.849591 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.869257 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.889982 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3
b294402d082b4c2405ba6c12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:30Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.890922 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.890957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.890968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.890986 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.890998 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.994524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.995158 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.995250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.995345 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:30 crc kubenswrapper[4813]: I1007 19:18:30.995414 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:30Z","lastTransitionTime":"2025-10-07T19:18:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.098709 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.099000 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.099103 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.099195 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.099277 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.202555 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.202787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.202991 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.203088 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.203181 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.306107 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.306150 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.306169 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.306191 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.306208 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.409235 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.409283 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.409299 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.409353 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.409370 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.512099 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.512152 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.512169 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.512191 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.512208 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.615320 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.615405 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.615423 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.615447 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.615463 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.718302 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.718538 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.718603 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.718663 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.718742 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.821170 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.821524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.821685 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.821837 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.821986 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.924851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.924894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.924911 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.924931 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:31 crc kubenswrapper[4813]: I1007 19:18:31.924946 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:31Z","lastTransitionTime":"2025-10-07T19:18:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.027995 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.028033 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.028042 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.028058 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.028068 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.131406 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.131451 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.131462 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.131478 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.131489 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.234806 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.234857 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.234874 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.234902 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.234919 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.339742 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.339824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.339852 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.339883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.339905 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.426856 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:32 crc kubenswrapper[4813]: E1007 19:18:32.427048 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:18:32 crc kubenswrapper[4813]: E1007 19:18:32.427118 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs podName:c8c05824-c5ea-44b7-bd35-0c7d6561a61b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:36.427097157 +0000 UTC m=+42.505352808 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs") pod "network-metrics-daemon-nz8v5" (UID: "c8c05824-c5ea-44b7-bd35-0c7d6561a61b") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.443377 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.443424 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.443444 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.443467 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.443485 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.546580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.546649 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.546669 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.546693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.546710 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.602578 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.602672 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.602688 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:32 crc kubenswrapper[4813]: E1007 19:18:32.602790 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.602899 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:32 crc kubenswrapper[4813]: E1007 19:18:32.603077 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:32 crc kubenswrapper[4813]: E1007 19:18:32.603202 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:32 crc kubenswrapper[4813]: E1007 19:18:32.603308 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.649501 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.649562 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.649579 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.649602 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.649619 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.752617 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.752670 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.752688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.752713 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.752729 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.856028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.856097 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.856156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.856184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.856201 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.958967 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.959036 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.959056 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.959084 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:32 crc kubenswrapper[4813]: I1007 19:18:32.959104 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:32Z","lastTransitionTime":"2025-10-07T19:18:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.062498 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.062557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.062575 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.062602 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.062619 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.165806 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.165871 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.165895 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.165925 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.165947 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.268645 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.268706 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.268724 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.268748 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.268766 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.371578 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.371634 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.371651 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.371674 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.371694 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.474499 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.474532 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.474543 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.474558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.474571 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.577181 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.577232 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.577248 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.577271 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.577288 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.680427 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.680472 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.680489 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.680515 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.680533 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.783896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.783971 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.783993 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.784028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.784049 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.887245 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.887388 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.887412 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.887442 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.887465 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.990076 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.990233 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.990262 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.990290 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:33 crc kubenswrapper[4813]: I1007 19:18:33.990314 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:33Z","lastTransitionTime":"2025-10-07T19:18:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.093646 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.093690 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.093705 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.093728 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.093744 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.197291 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.197415 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.197439 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.197504 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.197524 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.301197 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.301287 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.301305 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.301375 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.301401 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.404905 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.404961 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.404978 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.405002 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.405019 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.506976 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.507038 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.507057 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.507079 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.507095 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.602751 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.602813 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.602751 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.602918 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.602939 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.603061 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.603371 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.603503 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.610028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.610402 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.610553 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.610697 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.610842 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.643110 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4
a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.665122 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.679485 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.698808 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.713191 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.713489 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.713511 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc 
kubenswrapper[4813]: I1007 19:18:34.713534 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.713549 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.714503 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.727758 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.727790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.727804 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.727822 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.727835 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.728866 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.742247 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.743933 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.746313 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.746533 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.746634 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.746731 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.746816 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.756336 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.759606 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.762805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.762835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.762845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.762883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.762896 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.767362 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.777313 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.779129 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.781913 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.781949 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.781961 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.781999 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.782014 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.788954 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19
:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.792790 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.795754 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.795789 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.795802 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.795818 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.795829 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.800519 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.811633 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: E1007 19:18:34.811780 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.815540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.815659 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.815734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.815793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.815855 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.820913 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3
b294402d082b4c2405ba6c12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.833932 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.850786 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.866947 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.879725 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:34Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.919222 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.919303 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.919317 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.919393 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:34 crc kubenswrapper[4813]: I1007 19:18:34.919406 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:34Z","lastTransitionTime":"2025-10-07T19:18:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.020898 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.020961 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.020979 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.021003 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.021019 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.124250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.124298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.124317 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.124375 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.124395 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.226747 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.226994 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.227090 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.227153 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.227212 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.330014 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.330062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.330079 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.330102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.330118 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.432921 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.432965 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.432977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.432993 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.433004 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.536087 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.536539 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.536749 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.536921 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.537080 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.603128 4813 scope.go:117] "RemoveContainer" containerID="bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.640522 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.640561 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.640573 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.640589 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.640600 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.743106 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.743624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.743650 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.743680 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.743706 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.846995 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.847050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.847061 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.847078 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.847108 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.949225 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.949267 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.949275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.949289 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.949298 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:35Z","lastTransitionTime":"2025-10-07T19:18:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.963078 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.965717 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a"} Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.966359 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:18:35 crc kubenswrapper[4813]: I1007 19:18:35.985024 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath
\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:35Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.013295 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3
b294402d082b4c2405ba6c12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.029668 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.044479 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.052033 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.052072 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.052083 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.052100 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.052111 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.060388 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.074172 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.096441 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.114347 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.131799 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\
\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.148357 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.155092 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.155122 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.155130 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.155145 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.155156 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.177622 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.197969 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.229215 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.247656 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.262083 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.262317 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.262407 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.262550 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.262637 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.264551 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.277075 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.286721 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:36Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.365869 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.365913 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.365926 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.365944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.365957 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.468649 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.468930 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.469137 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.469573 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.469955 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.471048 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:18:36 crc kubenswrapper[4813]: E1007 19:18:36.471170 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 07 19:18:36 crc kubenswrapper[4813]: E1007 19:18:36.471213 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs podName:c8c05824-c5ea-44b7-bd35-0c7d6561a61b nodeName:}" failed. No retries permitted until 2025-10-07 19:18:44.47120019 +0000 UTC m=+50.549455801 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs") pod "network-metrics-daemon-nz8v5" (UID: "c8c05824-c5ea-44b7-bd35-0c7d6561a61b") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.572474 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.572826 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.572973 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.573127 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.573272 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.601811 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.601908 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.601811 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:18:36 crc kubenswrapper[4813]: E1007 19:18:36.601946 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.601985 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:18:36 crc kubenswrapper[4813]: E1007 19:18:36.602118 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 19:18:36 crc kubenswrapper[4813]: E1007 19:18:36.602290 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:18:36 crc kubenswrapper[4813]: E1007 19:18:36.602340 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.675714 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.675781 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.675793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.675809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.675839 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.779796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.779831 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.779842 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.779858 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.779868 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.882870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.882912 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.882924 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.882940 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.882953 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.985994 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.986049 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.986084 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.986141 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:36 crc kubenswrapper[4813]: I1007 19:18:36.986154 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:36Z","lastTransitionTime":"2025-10-07T19:18:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.089849 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.090253 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.090445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.090580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.090723 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.194102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.194156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.194174 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.194200 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.194217 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.297375 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.297428 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.297440 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.297458 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.297469 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.400396 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.400493 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.400525 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.400549 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.400568 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.503965 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.504008 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.504024 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.504045 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.504062 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.606744 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.606784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.606796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.606814 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.606827 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.709289 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.709378 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.709397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.709419 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.709435 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.813033 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.813089 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.813105 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.813128 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.813148 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.915488 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.915545 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.915563 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.915586 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:37 crc kubenswrapper[4813]: I1007 19:18:37.915608 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:37Z","lastTransitionTime":"2025-10-07T19:18:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.019009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.019060 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.019081 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.019112 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.019134 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.121702 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.121736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.121745 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.121758 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.121771 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.224926 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.224986 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.225008 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.225035 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.225055 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.328615 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.328675 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.328716 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.328745 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.328767 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.432732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.432779 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.432796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.432817 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.432834 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.536043 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.536102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.536132 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.536156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.536173 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.602408 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.602436 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:18:38 crc kubenswrapper[4813]: E1007 19:18:38.602987 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.602602 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:18:38 crc kubenswrapper[4813]: E1007 19:18:38.603101 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.602503 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:18:38 crc kubenswrapper[4813]: E1007 19:18:38.603186 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 19:18:38 crc kubenswrapper[4813]: E1007 19:18:38.602997 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.639628 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.639686 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.639703 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.639727 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.639743 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.743405 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.743483 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.744203 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.744288 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.744309 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.847229 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.847269 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.847280 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.847297 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.847309 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.951051 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.951100 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.951111 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.951128 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:38 crc kubenswrapper[4813]: I1007 19:18:38.951139 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:38Z","lastTransitionTime":"2025-10-07T19:18:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.054114 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.054155 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.054167 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.054185 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.054195 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.157183 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.157227 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.157237 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.157255 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.157266 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.259752 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.259809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.259824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.259842 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.260190 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.363401 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.363461 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.363487 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.363595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.363671 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.467039 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.467098 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.467120 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.467150 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.467172 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.571233 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.571302 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.571361 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.571393 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.571416 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.673682 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.673713 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.673721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.673734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.673743 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.775741 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.775778 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.775788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.775803 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.775814 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.879555 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.879621 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.879645 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.879672 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.879693 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.982392 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.982491 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.982516 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.982546 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:39 crc kubenswrapper[4813]: I1007 19:18:39.982564 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:39Z","lastTransitionTime":"2025-10-07T19:18:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.085779 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.085834 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.085851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.085873 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.085890 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:40Z","lastTransitionTime":"2025-10-07T19:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.190492 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.190529 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.190540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.190557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.190568 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:40Z","lastTransitionTime":"2025-10-07T19:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.294029 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.294085 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.294103 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.294127 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.294146 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:40Z","lastTransitionTime":"2025-10-07T19:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.397726 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.397809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.397843 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.397876 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.397897 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:40Z","lastTransitionTime":"2025-10-07T19:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.500268 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.500390 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.500404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.500422 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.500436 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:40Z","lastTransitionTime":"2025-10-07T19:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
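[Note: the same five-entry status block repeats at roughly 100 ms intervals from 19:18:38.847 through 19:18:40.500 with no other change than the timestamps; the duplicates are elided here.] Every one of these NotReady heartbeats carries the same root cause: the container runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI configuration yet. A minimal sketch of that readiness test follows; it is illustrative only, not CRI-O's actual implementation, and only the directory path is taken from the log.

import json
from pathlib import Path

CNI_CONF_DIR = Path("/etc/kubernetes/cni/net.d")  # directory named in the log

def network_ready() -> bool:
    """Rough model of the runtime's check: NetworkReady stays False
    until at least one parseable CNI config appears in the conf dir."""
    for p in sorted(CNI_CONF_DIR.glob("*")):
        if p.suffix not in (".conf", ".conflist", ".json"):
            continue
        try:
            json.loads(p.read_text())  # any parseable config flips readiness
            return True
        except (OSError, json.JSONDecodeError):
            continue
    return False

print("NetworkReady =", network_ready())

Until a parseable config lands in that directory, the kubelet keeps republishing the KubeletNotReady condition seen throughout this log.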
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.601638 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:18:40 crc kubenswrapper[4813]: E1007 19:18:40.601756 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.602090 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:18:40 crc kubenswrapper[4813]: E1007 19:18:40.602139 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.602254 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.602253 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:18:40 crc kubenswrapper[4813]: E1007 19:18:40.602334 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:18:40 crc kubenswrapper[4813]: E1007 19:18:40.602515 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b"
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.603554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.603614 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.603631 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.603655 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:40 crc kubenswrapper[4813]: I1007 19:18:40.603672 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:40Z","lastTransitionTime":"2025-10-07T19:18:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.602998 4813 scope.go:117] "RemoveContainer" containerID="7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12"
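The RemoveContainer above cleans up a crashed ovnkube-controller; the status_manager entries that follow show the underlying failure: every status patch routed through the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is rejected because its serving certificate expired on 2025-08-24T17:21:41Z, well before the node's current time. A quick way to read the certificate's validity window from the node is sketched below; it assumes the third-party cryptography package (version 42 or newer for the *_utc accessors).

import socket
import ssl
from cryptography import x509  # third-party; assumed installed

def serving_cert_window(host: str = "127.0.0.1", port: int = 9743):
    """Fetch the webhook's serving certificate without verifying it,
    and return its (notBefore, notAfter) validity window."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE  # inspect the cert, do not trust it
    with socket.create_connection((host, port), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            der = tls.getpeercert(binary_form=True)
    cert = x509.load_der_x509_certificate(der)
    return cert.not_valid_before_utc, cert.not_valid_after_utc

nbf, naf = serving_cert_window()
print(f"notBefore={nbf:%Y-%m-%dT%H:%M:%SZ} notAfter={naf:%Y-%m-%dT%H:%M:%SZ}")

Run against this node, the sketch would print notAfter=2025-08-24T17:21:41Z, matching the x509 "certificate has expired" errors recorded below.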
Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.957142 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.957202 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.957228 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.957259 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.957280 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:41Z","lastTransitionTime":"2025-10-07T19:18:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.989902 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/1.log" Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.993704 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd"} Oct 07 19:18:41 crc kubenswrapper[4813]: I1007 19:18:41.994556 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.012260 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.023823 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.205448 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.205505 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.205516 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.205532 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.205544 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:42Z","lastTransitionTime":"2025-10-07T19:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.219905 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.234208 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.248175 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.260894 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.276448 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269
ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 
2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.293934 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.307753 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.307795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.307813 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.307832 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.307844 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:42Z","lastTransitionTime":"2025-10-07T19:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.315446 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.331242 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.343894 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.364465 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9d
eb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.377996 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.389968 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.399339 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc 
kubenswrapper[4813]: I1007 19:18:42.410115 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.410298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.410419 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.410515 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.410600 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:42Z","lastTransitionTime":"2025-10-07T19:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.410679 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\
\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.421006 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:42Z is after 2025-08-24T17:21:41Z" Oct 07 
19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.513012 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.513474 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.513656 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.513988 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.514280 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:42Z","lastTransitionTime":"2025-10-07T19:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.602277 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.602382 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.602527 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:42 crc kubenswrapper[4813]: E1007 19:18:42.602515 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:42 crc kubenswrapper[4813]: E1007 19:18:42.602709 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:42 crc kubenswrapper[4813]: E1007 19:18:42.602796 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.602984 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:42 crc kubenswrapper[4813]: E1007 19:18:42.603095 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.617879 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.617956 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.617972 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.618026 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.618042 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:42Z","lastTransitionTime":"2025-10-07T19:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.720732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.720786 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.720797 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.720813 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.720825 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:42Z","lastTransitionTime":"2025-10-07T19:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.823567 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.823624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.823644 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.823668 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.823685 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:42Z","lastTransitionTime":"2025-10-07T19:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.926944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.926988 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.927003 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.927026 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.927043 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:42Z","lastTransitionTime":"2025-10-07T19:18:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:42 crc kubenswrapper[4813]: I1007 19:18:42.999393 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/2.log" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.000668 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/1.log" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.004679 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd" exitCode=1 Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.004720 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.004761 4813 scope.go:117] "RemoveContainer" containerID="7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.006136 4813 scope.go:117] "RemoveContainer" containerID="6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd" Oct 07 19:18:43 crc kubenswrapper[4813]: E1007 19:18:43.006619 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.030435 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.030424 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.030475 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.030632 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.030647 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.030656 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.045987 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.061543 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc 
kubenswrapper[4813]: I1007 19:18:43.077560 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.091863 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.107359 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.121661 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.132718 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.132749 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.132760 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.132776 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.132787 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.139853 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.158600 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.179604 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.195662 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.211472 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.237253 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.237302 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.237316 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.237356 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.237373 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.240906 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269
ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d177cdd2222807ff0aa61c9114aa21c8a1813e3b294402d082b4c2405ba6c12\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:26Z\\\",\\\"message\\\":\\\":Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1007 19:18:25.999364 6160 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:25Z is after 2025-08-24\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 
6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"
ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.265237 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.289063 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.308263 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\
\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.320478 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:43Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.340810 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.340860 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.340875 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.340894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.340908 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.443572 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.443632 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.443650 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.443674 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.443694 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.546641 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.546750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.547256 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.547426 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.547737 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.651361 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.651443 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.651480 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.651514 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.651535 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.754957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.755005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.755021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.755047 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.755063 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.857663 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.857714 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.857730 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.857752 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.857827 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.960988 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.961064 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.961084 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.961114 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:43 crc kubenswrapper[4813]: I1007 19:18:43.961136 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:43Z","lastTransitionTime":"2025-10-07T19:18:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.011792 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/2.log" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.019065 4813 scope.go:117] "RemoveContainer" containerID="6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd" Oct 07 19:18:44 crc kubenswrapper[4813]: E1007 19:18:44.019386 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.035077 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 
19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.051876 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.063635 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.063676 4813 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.063700 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.063717 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.063727 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.068768 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.085274 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
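Every one of these status patches is rejected for the same reason: the webhook serving certificate at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, long before the node clock's 2025-10-07. A stdlib-only Go sketch of the check the kubelet's TLS client is effectively performing follows; the address and 10s timeout come from the failing Post in the log, and InsecureSkipVerify is used solely so the handshake completes and the expired certificate can be inspected.

    package main

    import (
        "crypto/tls"
        "fmt"
        "net"
        "time"
    )

    func main() {
        addr := "127.0.0.1:9743" // webhook endpoint from the failing Post in the log
        d := &net.Dialer{Timeout: 10 * time.Second}
        // Skip chain verification so the handshake succeeds and we can read
        // the presented certificate even though it is expired.
        conn, err := tls.DialWithDialer(d, "tcp", addr, &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial:", err)
            return
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        now := time.Now()
        fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n",
            cert.NotBefore.UTC().Format(time.RFC3339),
            cert.NotAfter.UTC().Format(time.RFC3339),
            now.UTC().Format(time.RFC3339))
        if now.After(cert.NotAfter) {
            // Same condition the log reports: "current time ... is after"
            // the certificate's NotAfter.
            fmt.Println("certificate has expired")
        }
    }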
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.099794 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.115317 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.131050 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.144166 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.159751 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.166630 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.166660 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.166668 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.166698 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.166710 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.175149 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.203968 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.222791 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.235301 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.251942 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
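The NotReady loop keeps pointing at a single root symptom: no CNI configuration file in /etc/kubernetes/cni/net.d/, which ovnkube-controller would normally write once it stops crash-looping. A minimal sketch of the directory probe the runtime effectively performs, assuming the usual libcni extensions (.conf, .conflist, .json):

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // conf dir named in the NotReady message
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("read:", err)
            return
        }
        found := false
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // extensions libcni accepts (assumed)
                fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
                found = true
            }
        }
        if !found {
            // Matches the condition the kubelet keeps reporting:
            // NetworkReady=false / NetworkPluginNotReady.
            fmt.Println("no CNI configuration file in", dir)
        }
    }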
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.264670 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.269188 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.269227 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.269238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:44 crc 
kubenswrapper[4813]: I1007 19:18:44.269257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.269269 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.289981 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab7
31c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.311155 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.372381 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.372439 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:44 crc 
kubenswrapper[4813]: I1007 19:18:44.372458 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.372484 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.372504 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.475026 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.475088 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.475127 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.475158 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.475191 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.527610 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:18:44 crc kubenswrapper[4813]: E1007 19:18:44.527787 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 07 19:18:44 crc kubenswrapper[4813]: E1007 19:18:44.527866 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs podName:c8c05824-c5ea-44b7-bd35-0c7d6561a61b nodeName:}" failed. No retries permitted until 2025-10-07 19:19:00.527840023 +0000 UTC m=+66.606095654 (durationBeforeRetry 16s).
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs") pod "network-metrics-daemon-nz8v5" (UID: "c8c05824-c5ea-44b7-bd35-0c7d6561a61b") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.578460 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.578512 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.578530 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.578553 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.578571 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.601808 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:18:44 crc kubenswrapper[4813]: E1007 19:18:44.601953 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.602607 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.602675 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.602755 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:18:44 crc kubenswrapper[4813]: E1007 19:18:44.602761 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:44 crc kubenswrapper[4813]: E1007 19:18:44.602832 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:44 crc kubenswrapper[4813]: E1007 19:18:44.602905 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.626525 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:1
7:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.643225 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 
19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.656428 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.674753 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.682708 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.682734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.682741 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.682755 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.682764 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.690181 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.710111 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.726224 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.739844 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.753393 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.777220 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.785168 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.785222 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.785239 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.785263 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.785280 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.794621 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.821649 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.841787 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.859949 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.884172 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.888099 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.888387 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.888598 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.888796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.888991 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.899008 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.936005 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o
://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98
f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.980055 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.980096 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.980112 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.980135 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:44 crc kubenswrapper[4813]: I1007 19:18:44.980152 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:44Z","lastTransitionTime":"2025-10-07T19:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: E1007 19:18:45.001206 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:44Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.006104 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.006182 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.006197 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.006220 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.006232 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: E1007 19:18:45.025984 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:45Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.031895 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.031956 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.031969 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.031988 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.032282 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: E1007 19:18:45.049892 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:45Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.054560 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.054607 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.054624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.054647 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.054662 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: E1007 19:18:45.069632 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:45Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.074806 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.074857 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.074869 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.074891 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.075212 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: E1007 19:18:45.097382 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:45Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:45 crc kubenswrapper[4813]: E1007 19:18:45.097540 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.099487 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.099518 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.099726 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.099743 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.099755 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.202704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.202772 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.202794 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.202823 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.202844 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.305655 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.305721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.305742 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.305770 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.305791 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.408393 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.408454 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.408477 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.408506 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.408529 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.511828 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.511903 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.511931 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.511963 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.511986 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.614367 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.614423 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.614440 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.614465 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.614484 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.716884 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.716957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.716991 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.717021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.717038 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.819112 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.819148 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.819159 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.819174 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.819184 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.921883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.921954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.921976 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.922005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:45 crc kubenswrapper[4813]: I1007 19:18:45.922041 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:45Z","lastTransitionTime":"2025-10-07T19:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.024359 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.024390 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.024398 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.024410 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.024419 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.127530 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.127590 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.127606 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.127633 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.127651 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.230531 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.230586 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.230601 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.230626 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.230642 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.334355 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.334397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.334407 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.334423 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.334433 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.345857 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.346000 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.346033 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.346192 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.346249 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:19:18.34623215 +0000 UTC m=+84.424487761 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.346483 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:19:18.346473046 +0000 UTC m=+84.424728657 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.346525 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.346556 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:19:18.346547538 +0000 UTC m=+84.424803149 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.439463 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.439517 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.439534 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.439559 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.439577 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.447301 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.447414 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.447499 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.447532 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.447550 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.447562 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.447583 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.447597 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.447624 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 19:19:18.447601622 +0000 UTC m=+84.525857243 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.447648 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 19:19:18.447637103 +0000 UTC m=+84.525892724 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.542758 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.542813 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.542829 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.542852 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.542869 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.601760 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.602085 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.601854 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.602362 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.601854 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.602614 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.601854 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:46 crc kubenswrapper[4813]: E1007 19:18:46.602858 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.645750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.646156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.646357 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.646530 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.646663 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.749129 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.749627 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.749822 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.749968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.750107 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.851900 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.851940 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.851959 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.851980 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.851997 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.954912 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.954970 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.954988 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.955013 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:46 crc kubenswrapper[4813]: I1007 19:18:46.955030 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:46Z","lastTransitionTime":"2025-10-07T19:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.058050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.058094 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.058103 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.058120 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.058131 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.160898 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.160930 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.160939 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.160954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.160964 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.263885 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.263922 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.263930 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.263945 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.263965 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.366679 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.366775 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.366794 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.366822 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.366840 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.445816 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.465008 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.471490 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.471533 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.471551 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.471574 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.471589 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.472284 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.489352 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 
19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.506032 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.526633 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.549349 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.569604 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.574366 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.574414 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.574432 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.574457 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.574474 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.588965 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.612121 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.633745 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.650744 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.667801 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.676619 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.676896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.677024 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.677140 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.677249 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.682948 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.713224 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.747982 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.767471 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.779076 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\
\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.779627 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.779655 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.779663 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.779676 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.779685 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.791658 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:47Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.881855 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.881911 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.881928 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.881952 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.881969 4813 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.985727 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.985787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.985803 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.985827 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:47 crc kubenswrapper[4813]: I1007 19:18:47.985844 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:47Z","lastTransitionTime":"2025-10-07T19:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.088460 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.088527 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.088541 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.088565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.088580 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.191517 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.191563 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.191575 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.191594 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.191607 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.294525 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.294572 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.294589 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.294611 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.294627 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.397873 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.397933 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.397954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.397981 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.398000 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.500229 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.500285 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.500301 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.500342 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.500358 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.601523 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.601595 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.601695 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:48 crc kubenswrapper[4813]: E1007 19:18:48.601777 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.601920 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:48 crc kubenswrapper[4813]: E1007 19:18:48.602122 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:48 crc kubenswrapper[4813]: E1007 19:18:48.602159 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:48 crc kubenswrapper[4813]: E1007 19:18:48.602384 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.602940 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.603007 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.603029 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.603058 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.603079 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.705704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.705759 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.705769 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.705805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.705819 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.808712 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.808771 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.808788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.808813 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.808835 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.912021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.912104 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.912122 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.912144 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:48 crc kubenswrapper[4813]: I1007 19:18:48.912191 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:48Z","lastTransitionTime":"2025-10-07T19:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.015171 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.015258 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.015274 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.015299 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.015359 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.118762 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.118844 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.118868 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.118927 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.118949 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.221795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.221925 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.221952 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.221981 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.222002 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.270871 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.289391 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.312583 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.325606 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.325661 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.325684 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.325711 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.325733 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.329853 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.350751 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.371992 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.394368 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.412154 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.424361 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.428428 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.428473 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.428488 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.428512 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.428529 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.453491 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.511176 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.531023 4813 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.531316 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.531411 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.531527 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.531623 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.551782 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b14472353884
16b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.566309 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"na
me\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.577743 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",
\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.594931 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9
be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.605338 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.613605 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9
eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.625708 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.633498 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.633704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.633868 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.633977 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.634079 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.638970 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:49Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.737145 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.737208 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.737684 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.737737 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.737760 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.839792 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.839923 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.839941 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.839963 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.839977 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.944746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.945638 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.945766 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.945932 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:49 crc kubenswrapper[4813]: I1007 19:18:49.946063 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:49Z","lastTransitionTime":"2025-10-07T19:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.048710 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.049037 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.049164 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.049301 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.049525 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.153218 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.153265 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.153283 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.153306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.153363 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.256773 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.256843 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.256868 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.256905 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.256931 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.360146 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.360238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.360267 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.360298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.360361 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.463298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.463395 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.463417 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.463445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.463466 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.565818 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.566501 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.566537 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.566566 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.566586 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.602537 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.602607 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.602560 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:50 crc kubenswrapper[4813]: E1007 19:18:50.602676 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:50 crc kubenswrapper[4813]: E1007 19:18:50.602752 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:50 crc kubenswrapper[4813]: E1007 19:18:50.602821 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.602896 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:50 crc kubenswrapper[4813]: E1007 19:18:50.602960 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.669736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.669777 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.669788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.669803 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.669814 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.772721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.772776 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.772792 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.772815 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.772832 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.875274 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.875737 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.876007 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.876247 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.876498 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.979615 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.979946 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.980092 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.980382 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:50 crc kubenswrapper[4813]: I1007 19:18:50.980577 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:50Z","lastTransitionTime":"2025-10-07T19:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.083793 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.083854 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.083872 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.083896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.083912 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.186691 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.186759 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.186784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.186817 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.186844 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.290412 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.290523 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.290551 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.290580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.290602 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.395196 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.395256 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.395278 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.395310 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.395369 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.498190 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.498452 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.498546 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.498619 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.498678 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.600799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.601336 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.601429 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.601521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.601611 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.705317 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.705450 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.705473 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.705500 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.705519 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.808168 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.808533 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.808685 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.808850 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.808995 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.911848 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.911890 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.911901 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.911918 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:51 crc kubenswrapper[4813]: I1007 19:18:51.911929 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:51Z","lastTransitionTime":"2025-10-07T19:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.015510 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.015585 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.015608 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.015636 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.015661 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.117984 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.118399 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.118576 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.118834 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.118998 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.222970 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.223279 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.223508 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.223728 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.223927 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.326982 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.327546 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.327800 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.328025 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.328246 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.431699 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.432104 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.432261 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.432457 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.432601 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.536148 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.536211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.536228 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.536251 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.536268 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.602261 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.602409 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.603272 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.603508 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:52 crc kubenswrapper[4813]: E1007 19:18:52.603635 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:52 crc kubenswrapper[4813]: E1007 19:18:52.603508 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:52 crc kubenswrapper[4813]: E1007 19:18:52.604031 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:52 crc kubenswrapper[4813]: E1007 19:18:52.604170 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.639705 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.639765 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.639783 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.639822 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.639842 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.742602 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.742639 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.742650 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.742666 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.742680 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.845317 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.845786 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.845999 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.846201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.846433 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.949787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.949854 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.949877 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.949904 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:52 crc kubenswrapper[4813]: I1007 19:18:52.949924 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:52Z","lastTransitionTime":"2025-10-07T19:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.052802 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.052997 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.053017 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.053044 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.053214 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.157225 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.157296 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.157359 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.157396 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.157435 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.260230 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.260269 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.260286 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.260309 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.260356 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.362425 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.362472 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.362486 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.362506 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.362520 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.465113 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.465481 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.465654 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.465830 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.465975 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.569201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.569760 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.569949 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.570088 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.570244 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.673105 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.673159 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.673178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.673201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.673218 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.775735 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.776004 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.776163 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.776304 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.776504 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.879558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.880035 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.880439 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.880796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.881114 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.983998 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.984301 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.984677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.985022 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:53 crc kubenswrapper[4813]: I1007 19:18:53.985306 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:53Z","lastTransitionTime":"2025-10-07T19:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.088445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.088522 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.088542 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.088570 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.088588 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.192027 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.192073 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.192084 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.192103 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.192116 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.294806 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.295112 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.295215 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.295356 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.295486 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.398304 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.398369 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.398382 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.398395 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.398404 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.501050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.501140 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.501159 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.501189 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.501206 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.601725 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.601731 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.601869 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:54 crc kubenswrapper[4813]: E1007 19:18:54.601953 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
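
The retry pattern is visible in the timestamps: the node-condition block recurs about every 100 ms (19:18:54.088 -> .192 -> .295 above), while the "Error syncing pod, skipping" burst for the same four pods returns two seconds after the 19:18:52.60 attempt. A throwaway parser makes the cadence explicit (Python, standard library; the regex only covers the I/E klog prefix format used in these lines, and the script is an illustrative aid, not a kubelet tool):

#!/usr/bin/env python3
"""Throwaway sketch: measure how often each kubelet message recurs.

Reads a kubelet journal excerpt on stdin and prints the gap between
consecutive occurrences of the same log call site (file:line).
"""
import re
import sys
from collections import defaultdict
from datetime import datetime

# e.g. "I1007 19:18:52.326982 4813 kubelet_node_status.go:724] ..."
KLOG = re.compile(r"([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6}) \d+ ([\w.]+:\d+)\]")

last_seen = defaultdict(lambda: None)
for line in sys.stdin:
    m = KLOG.search(line)
    if not m:
        continue
    level, _date, clock, site = m.groups()
    t = datetime.strptime(clock, "%H:%M:%S.%f")
    prev = last_seen[site]
    if prev is not None:
        gap_ms = (t - prev).total_seconds() * 1000.0
        print("%-30s %s +%8.1f ms" % (site, level, gap_ms))
    last_seen[site] = t

Fed this excerpt, it would show kubelet_node_status.go:724 firing in roughly 100 ms groups and pod_workers.go:1301 recurring at roughly 2 s intervals, which is why the log repeats rather than progresses.
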
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.602049 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:54 crc kubenswrapper[4813]: E1007 19:18:54.602176 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:54 crc kubenswrapper[4813]: E1007 19:18:54.602212 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:54 crc kubenswrapper[4813]: E1007 19:18:54.602275 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.602633 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.602677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.602689 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.602702 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.602712 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.637117 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.652277 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.667274 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.683205 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.699199 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.704749 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.704775 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.704783 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.704796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.704804 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.713031 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.725415 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 
19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.737718 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.750787 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.765138 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.775833 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.787504 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.804108 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.806429 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.806476 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.806524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.806548 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.806565 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.821129 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.833993 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.848618 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.862151 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.888793 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269
ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:54Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.909190 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.909274 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.909292 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.909315 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:54 crc kubenswrapper[4813]: I1007 19:18:54.909360 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:54Z","lastTransitionTime":"2025-10-07T19:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.011719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.011764 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.011775 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.011794 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.011806 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.104080 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.104122 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.104135 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.104153 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.104165 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: E1007 19:18:55.126554 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:55Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.131693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.131756 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.131773 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.131796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.131812 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: E1007 19:18:55.151894 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:55Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.156968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.157028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.157046 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.157069 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.157085 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: E1007 19:18:55.181803 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:55Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.187514 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.187607 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.187625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.187688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.187711 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: E1007 19:18:55.207292 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:55Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.212489 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.212555 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.212581 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.212612 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.212637 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: E1007 19:18:55.234416 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:18:55Z is after 2025-08-24T17:21:41Z" Oct 07 19:18:55 crc kubenswrapper[4813]: E1007 19:18:55.234941 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.237604 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
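Both status-update attempts above fail for the same reason: the admission webhook at 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, while the node clock reads 2025-10-07. A minimal Go sketch for checking a PEM certificate's validity window against the current time; the file path is a placeholder, not a path taken from this log:

```go
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; substitute the webhook's actual serving certificate.
	data, err := os.ReadFile("/etc/kubernetes/webhook-serving.crt")
	if err != nil {
		fmt.Fprintln(os.Stderr, "read cert:", err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, "parse cert:", err)
		os.Exit(1)
	}
	now := time.Now()
	fmt.Printf("NotBefore: %s\nNotAfter:  %s\n", cert.NotBefore, cert.NotAfter)
	// Same window check behind the "expired or is not yet valid" x509 error above.
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate is NOT valid at %s\n", now)
		os.Exit(2)
	}
	fmt.Println("certificate is currently valid")
}
```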
event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.237686 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.237706 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.237740 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.237760 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.340632 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.341007 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.341022 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.341044 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.341056 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.443496 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.443571 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.443583 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.443601 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.443617 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.546718 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.546823 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.546848 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.546879 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.546902 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.649905 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.649947 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.649957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.649973 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.649985 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.753360 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.753404 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.753420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.753443 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.753460 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.856119 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.856193 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.856218 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.856246 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.856262 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.960175 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.960236 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.960253 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.960279 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:55 crc kubenswrapper[4813]: I1007 19:18:55.960296 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:55Z","lastTransitionTime":"2025-10-07T19:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.063108 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.063169 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.063185 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.063241 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.063260 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.166781 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.166837 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.166847 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.166862 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.166872 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.269915 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.270045 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.270065 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.270093 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.270144 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.373531 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.373633 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.373655 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.373681 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.373701 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.476521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.476574 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.476589 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.476612 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.476628 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.578908 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.578969 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.578979 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.578994 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.579003 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.602554 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:56 crc kubenswrapper[4813]: E1007 19:18:56.602681 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.602553 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.602706 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.603143 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:56 crc kubenswrapper[4813]: E1007 19:18:56.603281 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:56 crc kubenswrapper[4813]: E1007 19:18:56.603453 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:56 crc kubenswrapper[4813]: E1007 19:18:56.603594 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.603738 4813 scope.go:117] "RemoveContainer" containerID="6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd" Oct 07 19:18:56 crc kubenswrapper[4813]: E1007 19:18:56.604060 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.681515 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.681555 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.681566 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.681608 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.681666 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.784399 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.784452 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.784468 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.784491 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.784507 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.887311 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.887432 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.887448 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.887471 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.887487 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.989818 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.989916 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.989933 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.989957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:56 crc kubenswrapper[4813]: I1007 19:18:56.989973 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:56Z","lastTransitionTime":"2025-10-07T19:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.092235 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.092283 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.092292 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.092310 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.092333 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.195165 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.195211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.195222 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.195238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.195250 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.319005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.319078 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.319094 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.319120 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.319144 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.422457 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.422513 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.422527 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.422547 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.422559 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.525710 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.525758 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.525772 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.525794 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.525807 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.628955 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.629071 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.629090 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.629114 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.629130 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.732192 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.732239 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.732247 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.732261 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.732270 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.834104 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.834148 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.834158 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.834178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.834188 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.936465 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.936543 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.936566 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.936595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:57 crc kubenswrapper[4813]: I1007 19:18:57.936619 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:57Z","lastTransitionTime":"2025-10-07T19:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.039656 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.039984 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.040053 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.040078 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.040154 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.142675 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.142712 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.142723 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.142740 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.142752 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.245668 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.245712 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.245727 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.245746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.245760 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.348944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.348995 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.349011 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.349076 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.349095 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.452894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.452960 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.452985 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.453009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.453026 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.556803 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.556865 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.556887 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.556914 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.556935 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.601886 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.601982 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:18:58 crc kubenswrapper[4813]: E1007 19:18:58.602222 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:18:58 crc kubenswrapper[4813]: E1007 19:18:58.602386 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.602984 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:18:58 crc kubenswrapper[4813]: E1007 19:18:58.603280 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.603488 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:18:58 crc kubenswrapper[4813]: E1007 19:18:58.603796 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.660528 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.660565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.660577 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.660593 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.660603 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.762114 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.762152 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.762162 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.762178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.762187 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.864051 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.864093 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.864102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.864117 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.864127 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.966846 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.966881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.966889 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.966903 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:58 crc kubenswrapper[4813]: I1007 19:18:58.966912 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:58Z","lastTransitionTime":"2025-10-07T19:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.069247 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.069306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.069361 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.069384 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.069449 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.172391 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.172431 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.172440 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.172456 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.172465 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.274384 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.274430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.274440 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.274459 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.274480 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.376685 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.376790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.376809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.376876 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.376894 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.479677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.479731 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.479749 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.479773 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.479789 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.582191 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.582238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.582252 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.582269 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.582281 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.685005 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.685040 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.685048 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.685062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.685071 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.787487 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.787545 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.787562 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.787586 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.787603 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.890238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.890307 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.890355 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.890377 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.890388 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.993779 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.993820 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.993831 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.993847 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:18:59 crc kubenswrapper[4813]: I1007 19:18:59.993859 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:18:59Z","lastTransitionTime":"2025-10-07T19:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.096373 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.096420 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.096430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.096445 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.096456 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.199168 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.199205 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.199213 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.199249 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.199263 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.301887 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.301944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.301955 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.301971 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.301980 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.404716 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.404761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.404772 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.404788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.404806 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.507779 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.507811 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.507842 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.507857 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.507868 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.588163 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:00 crc kubenswrapper[4813]: E1007 19:19:00.588429 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:19:00 crc kubenswrapper[4813]: E1007 19:19:00.588518 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs podName:c8c05824-c5ea-44b7-bd35-0c7d6561a61b nodeName:}" failed. No retries permitted until 2025-10-07 19:19:32.588497093 +0000 UTC m=+98.666752764 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs") pod "network-metrics-daemon-nz8v5" (UID: "c8c05824-c5ea-44b7-bd35-0c7d6561a61b") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.602416 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.602454 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.602433 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:00 crc kubenswrapper[4813]: E1007 19:19:00.602549 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.602429 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:00 crc kubenswrapper[4813]: E1007 19:19:00.602653 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:00 crc kubenswrapper[4813]: E1007 19:19:00.602719 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:00 crc kubenswrapper[4813]: E1007 19:19:00.602824 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.609834 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.609902 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.609918 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.609941 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.609958 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.712594 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.712648 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.712664 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.712688 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.712707 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.814735 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.814784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.814796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.814813 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.814824 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.916765 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.916807 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.916821 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.916838 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:00 crc kubenswrapper[4813]: I1007 19:19:00.916851 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:00Z","lastTransitionTime":"2025-10-07T19:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.019511 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.019571 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.019582 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.019599 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.019611 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.121777 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.121834 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.121848 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.121869 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.121884 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.224810 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.224858 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.224870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.224886 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.224897 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.327529 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.327576 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.327586 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.327602 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.327612 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.430242 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.430289 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.430299 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.430314 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.430344 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.533799 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.533836 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.533845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.533859 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.533869 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.636939 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.636973 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.636985 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.637003 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.637014 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.738889 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.738956 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.738974 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.738998 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.739015 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.841914 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.841952 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.841961 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.841975 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.841985 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.943830 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.944275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.944390 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.944502 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:01 crc kubenswrapper[4813]: I1007 19:19:01.944584 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:01Z","lastTransitionTime":"2025-10-07T19:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.047432 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.047624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.047704 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.047773 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.047849 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.150468 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.150540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.150557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.150584 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.150605 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.253515 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.253588 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.253611 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.253635 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.253652 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.355728 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.355967 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.356037 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.356117 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.356197 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.458722 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.458921 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.459012 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.459092 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.459159 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.562565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.562624 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.562640 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.562663 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.562679 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.601816 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.601987 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.601905 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:02 crc kubenswrapper[4813]: E1007 19:19:02.602018 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.601889 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:02 crc kubenswrapper[4813]: E1007 19:19:02.602300 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:02 crc kubenswrapper[4813]: E1007 19:19:02.602419 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:02 crc kubenswrapper[4813]: E1007 19:19:02.602498 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.665223 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.665252 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.665261 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.665288 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.665300 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.768308 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.768368 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.768381 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.768400 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.768409 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.870788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.870827 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.870837 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.870851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.870864 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.972756 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.973361 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.973374 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.973387 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:02 crc kubenswrapper[4813]: I1007 19:19:02.973395 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:02Z","lastTransitionTime":"2025-10-07T19:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.076677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.076698 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.076706 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.076718 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.076726 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.085284 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/0.log" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.085322 4813 generic.go:334] "Generic (PLEG): container finished" podID="76e24ee5-81b1-4538-aca5-141e399e32e9" containerID="6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97" exitCode=1 Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.085358 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbxzg" event={"ID":"76e24ee5-81b1-4538-aca5-141e399e32e9","Type":"ContainerDied","Data":"6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.085648 4813 scope.go:117] "RemoveContainer" containerID="6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.105393 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.117566 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.127573 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.136839 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.148449 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.160212 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.171603 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.179237 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.179267 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.179276 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.179290 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.179300 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.183525 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.191831 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.208111 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269
ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.229481 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c
33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.248675 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.263707 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:02Z\\\",\\\"message\\\":\\\"2025-10-07T19:18:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773\\\\n2025-10-07T19:18:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773 to /host/opt/cni/bin/\\\\n2025-10-07T19:18:17Z [verbose] multus-daemon started\\\\n2025-10-07T19:18:17Z [verbose] Readiness Indicator file check\\\\n2025-10-07T19:19:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.273870 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.281693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.281732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.281744 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.281758 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.281769 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.287881 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.299113 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.307891 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9
eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.319765 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:03Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.384155 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.384192 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.384202 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.384239 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.384269 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.486574 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.486615 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.486630 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.486652 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.486670 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.588515 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.588542 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.588551 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.588564 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.588572 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.691649 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.691683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.691692 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.691705 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.691714 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.794534 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.794571 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.794580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.794594 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.794604 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.897016 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.897047 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.897058 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.897072 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:03 crc kubenswrapper[4813]: I1007 19:19:03.897081 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:03Z","lastTransitionTime":"2025-10-07T19:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.000186 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.000217 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.000224 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.000238 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.000247 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.090674 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/0.log" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.090723 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbxzg" event={"ID":"76e24ee5-81b1-4538-aca5-141e399e32e9","Type":"ContainerStarted","Data":"42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.101746 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.101828 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.101849 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.101871 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.101911 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.106927 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.139003 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.156993 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.169316 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:02Z\\\",\\\"message\\\":\\\"2025-10-07T19:18:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773\\\\n2025-10-07T19:18:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773 to /host/opt/cni/bin/\\\\n2025-10-07T19:18:17Z [verbose] multus-daemon started\\\\n2025-10-07T19:18:17Z [verbose] Readiness Indicator file 
check\\\\n2025-10-07T19:19:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.180216 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.193525 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.204178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.204535 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.204690 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.204211 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.204844 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.205268 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.215925 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.225280 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.235461 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.245295 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.255937 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.267472 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.278217 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.300644 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.308111 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.308176 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.308189 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.308208 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.308221 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.314430 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.326761 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.340087 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.410631 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.410665 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.410673 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.410705 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.410715 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.512303 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.512352 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.512361 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.512374 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.512383 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.602157 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.602197 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.602236 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:04 crc kubenswrapper[4813]: E1007 19:19:04.602310 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.602504 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:04 crc kubenswrapper[4813]: E1007 19:19:04.602507 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:04 crc kubenswrapper[4813]: E1007 19:19:04.602573 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:04 crc kubenswrapper[4813]: E1007 19:19:04.602665 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.615562 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.615597 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.615606 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.615621 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.615630 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.619290 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.631562 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.645164 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.658048 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.668372 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.693533 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.711395 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c
33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.718251 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.718306 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc 
kubenswrapper[4813]: I1007 19:19:04.718358 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.718389 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.718412 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.733533 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe6
1c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.750756 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:02Z\\\",\\\"message\\\":\\\"2025-10-07T19:18:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773\\\\n2025-10-07T19:18:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773 to /host/opt/cni/bin/\\\\n2025-10-07T19:18:17Z [verbose] multus-daemon started\\\\n2025-10-07T19:18:17Z [verbose] Readiness Indicator file check\\\\n2025-10-07T19:19:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.762181 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.777638 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.791282 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.803912 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 
19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.814967 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.820576 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.820607 4813 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.820615 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.820636 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.820645 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.827102 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.840362 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.852681 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.865579 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:04Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.922931 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.922981 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.922993 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.923012 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:04 crc kubenswrapper[4813]: I1007 19:19:04.923025 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:04Z","lastTransitionTime":"2025-10-07T19:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.025994 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.026064 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.026087 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.026117 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.026585 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.129732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.129778 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.129788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.129805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.129815 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.232110 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.232156 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.232170 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.232188 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.232200 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.309849 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.309881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.309890 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.309904 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.309914 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: E1007 19:19:05.329569 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:05Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.332716 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.332762 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.332776 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.332795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.332809 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: E1007 19:19:05.349055 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:05Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.351764 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.351788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.351796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.351809 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.351820 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: E1007 19:19:05.362593 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:05Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.366849 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.366887 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.366897 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.366912 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.366922 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: E1007 19:19:05.379731 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:05Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.383211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.383252 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.383262 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.383277 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.383288 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: E1007 19:19:05.394944 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:05Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:05 crc kubenswrapper[4813]: E1007 19:19:05.395065 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.396884 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.396934 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.396945 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.396962 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.396972 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.499711 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.499957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.500024 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.500096 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.500155 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.603275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.603350 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.603363 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.603379 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.603391 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.705629 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.705750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.705774 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.705804 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.705824 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.808034 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.808084 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.808094 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.808113 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.808124 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.910439 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.910502 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.910519 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.910549 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:05 crc kubenswrapper[4813]: I1007 19:19:05.910579 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:05Z","lastTransitionTime":"2025-10-07T19:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.013017 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.013050 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.013061 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.013076 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.013088 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.115212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.115255 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.115267 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.115284 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.115296 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.217147 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.217180 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.217188 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.217201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.217210 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.319957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.320019 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.320036 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.320061 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.320078 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.422516 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.422557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.422573 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.422595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.422612 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.524794 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.524836 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.524867 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.524883 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.524896 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.602412 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.602480 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.602553 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:06 crc kubenswrapper[4813]: E1007 19:19:06.602699 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.602895 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:06 crc kubenswrapper[4813]: E1007 19:19:06.602973 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:06 crc kubenswrapper[4813]: E1007 19:19:06.603125 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:06 crc kubenswrapper[4813]: E1007 19:19:06.603275 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.626471 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.627099 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.627221 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.627250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.627273 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.739170 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.739224 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.739240 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.739263 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.739280 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.841713 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.841750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.841761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.841779 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.841792 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.944443 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.944504 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.944521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.944545 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:06 crc kubenswrapper[4813]: I1007 19:19:06.944561 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:06Z","lastTransitionTime":"2025-10-07T19:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.047325 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.047384 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.047394 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.047409 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.047420 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.149126 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.149189 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.149207 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.149232 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.149262 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.252042 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.252093 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.252105 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.252126 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.252144 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.354955 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.355012 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.355044 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.355066 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.355079 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.459318 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.459382 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.459391 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.459412 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.459425 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.562392 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.562458 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.562469 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.562489 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.562500 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.603223 4813 scope.go:117] "RemoveContainer" containerID="6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.665811 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.665925 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.665949 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.665985 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.666008 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.768525 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.768569 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.768581 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.768602 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.768615 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.870845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.870881 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.870891 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.870907 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.870921 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.972850 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.972894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.972905 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.972920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:07 crc kubenswrapper[4813]: I1007 19:19:07.972933 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:07Z","lastTransitionTime":"2025-10-07T19:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.075463 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.075507 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.075519 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.075542 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.075553 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.104519 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/2.log" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.107001 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.108835 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.119156 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 
19:19:08.135708 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\
\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.147522 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.158588 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:02Z\\\",\\\"message\\\":\\\"2025-10-07T19:18:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773\\\\n2025-10-07T19:18:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773 to /host/opt/cni/bin/\\\\n2025-10-07T19:18:17Z [verbose] multus-daemon started\\\\n2025-10-07T19:18:17Z [verbose] Readiness Indicator file check\\\\n2025-10-07T19:19:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.166957 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.177454 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.177494 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.177504 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.177524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.177536 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.179583 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.193479 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.206381 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 
19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.219388 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.229193 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.244825 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.257872 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.271713 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.279649 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.279681 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.279689 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.279702 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.279712 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.283009 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.306355 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.319465 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.330669 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.341135 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:08Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.410599 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.410644 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.410658 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.410683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.410695 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.512162 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.512401 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.512468 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.512527 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.512580 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.601700 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:08 crc kubenswrapper[4813]: E1007 19:19:08.602048 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.601821 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:08 crc kubenswrapper[4813]: E1007 19:19:08.602217 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.601740 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:08 crc kubenswrapper[4813]: E1007 19:19:08.602441 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.601868 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:08 crc kubenswrapper[4813]: E1007 19:19:08.602615 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.614621 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.614687 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.614709 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.614736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.614756 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.717300 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.717536 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.717631 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.717694 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.717759 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.821350 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.821589 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.821699 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.821864 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.821981 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.924137 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.924372 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.924508 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.924625 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:08 crc kubenswrapper[4813]: I1007 19:19:08.924743 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:08Z","lastTransitionTime":"2025-10-07T19:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.027364 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.027590 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.027651 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.027744 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.027826 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.112997 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/3.log" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.113917 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/2.log" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.118038 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202" exitCode=1 Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.118088 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.118141 4813 scope.go:117] "RemoveContainer" containerID="6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.119763 4813 scope.go:117] "RemoveContainer" containerID="6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202" Oct 07 19:19:09 crc kubenswrapper[4813]: E1007 19:19:09.120368 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.132790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.132986 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.133167 4813 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.133452 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.133760 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.145705 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.166439 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.183713 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9
eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.199032 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.216351 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.237257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.237312 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.237363 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.237394 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.237416 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.238511 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.258657 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.276274 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.292663 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.321386 4813 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c68f994c3dd870df1b06dacee0ebeb83c87b269ecad0281b395b67b89800ddd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:18:42Z\\\",\\\"message\\\":\\\"},},NodeRuntimeHandler{Name:,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*true,},},NodeRuntimeHandler{Name:runc,Features:\\\\u0026NodeRuntimeHandlerFeatures{RecursiveReadOnlyMounts:*true,UserNamespaces:*false,},},},Features:nil,},}\\\\nI1007 19:18:42.749278 6384 egressqos.go:1009] Finished syncing EgressQoS node crc : 848.333µs\\\\nI1007 19:18:42.740933 6384 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1007 19:18:42.740953 6384 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741017 6384 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741070 6384 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.741085 6384 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1007 19:18:42.751751 6384 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:18:42.751806 6384 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1007 19:18:42.751919 6384 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:41Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:08Z\\\",\\\"message\\\":\\\"val\\\\nI1007 19:19:08.509797 6721 handler.go:190] 
Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:19:08.509811 6721 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 19:19:08.509816 6721 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 19:19:08.509834 6721 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 19:19:08.509855 6721 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1007 19:19:08.509863 6721 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1007 19:19:08.509880 6721 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1007 19:19:08.510200 6721 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:19:08.510235 6721 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:19:08.510232 6721 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 19:19:08.510259 6721 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 19:19:08.510273 6721 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 19:19:08.510336 6721 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1007 19:19:08.510386 6721 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 19:19:08.510454 6721 factory.go:656] Stopping watch factory\\\\nI1007 19:19:08.510469 6721 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:19:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.341481 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.341913 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.342054 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.342204 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.342363 4813 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.348470 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.369111 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.385663 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.403963 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.436214 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56
bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.444995 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.445064 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.445081 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.445107 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.445123 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.461422 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.482694 4813 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:02Z\\\",\\\"message\\\":\\\"2025-10-07T19:18:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773\\\\n2025-10-07T19:18:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773 to /host/opt/cni/bin/\\\\n2025-10-07T19:18:17Z [verbose] multus-daemon started\\\\n2025-10-07T19:18:17Z [verbose] Readiness Indicator file check\\\\n2025-10-07T19:19:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.498736 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:09Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.548123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.548197 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.548211 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.548236 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.548251 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.617728 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.650879 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.651056 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.651136 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.651217 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.651304 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.753545 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.753611 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.753633 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.753664 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.753689 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.855962 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.855996 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.856013 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.856029 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.856039 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.959561 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.959872 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.960068 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.960295 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:09 crc kubenswrapper[4813]: I1007 19:19:09.960801 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:09Z","lastTransitionTime":"2025-10-07T19:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.063273 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.063638 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.063829 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.063987 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.064169 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.125057 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/3.log" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.131385 4813 scope.go:117] "RemoveContainer" containerID="6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202" Oct 07 19:19:10 crc kubenswrapper[4813]: E1007 19:19:10.131621 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.147868 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52d4b6b1-572a-43b1-8453-4381a7c4872a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a38d5db77ffa8a2032673e8d879927c1d10d9bdc91969b1c58b7ca1583fda848\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e095b845cf48b87ddbd052291f606128d827fdfce6a535afdff9e00635229f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e095b845cf48b87ddbd052291f606128d827fdfce6a535afdff9e00635229f50\\\",\\\"exitCode\\\":0,\
\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.167785 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.167836 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.167853 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.167877 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.167897 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.168177 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.192393 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.210494 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.229231 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.253263 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.271030 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.271120 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.271136 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.271160 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.271178 4813 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.276895 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.301540 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.323469 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.343786 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.374271 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.374315 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.374342 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.374359 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.374371 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.374511 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:08Z\\\",\\\"message\\\":\\\"val\\\\nI1007 19:19:08.509797 6721 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:19:08.509811 6721 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 19:19:08.509816 6721 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 19:19:08.509834 6721 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 19:19:08.509855 6721 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1007 19:19:08.509863 6721 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1007 19:19:08.509880 6721 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1007 19:19:08.510200 6721 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:19:08.510235 6721 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:19:08.510232 6721 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 19:19:08.510259 6721 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 19:19:08.510273 6721 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 19:19:08.510336 6721 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1007 19:19:08.510386 6721 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 19:19:08.510454 6721 factory.go:656] Stopping watch factory\\\\nI1007 19:19:08.510469 6721 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:19:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:19:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.409137 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c
33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.432630 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.454150 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:02Z\\\",\\\"message\\\":\\\"2025-10-07T19:18:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773\\\\n2025-10-07T19:18:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773 to /host/opt/cni/bin/\\\\n2025-10-07T19:18:17Z [verbose] multus-daemon started\\\\n2025-10-07T19:18:17Z [verbose] Readiness Indicator file check\\\\n2025-10-07T19:19:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.469713 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.478868 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.478920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.478939 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.478964 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.478981 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.489640 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.510115 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.527812 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.544985 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:10Z is after 2025-08-24T17:21:41Z"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.581677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.581745 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.581768 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.581796 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.581817 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.602044 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.602089 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.602173 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.602280 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:19:10 crc kubenswrapper[4813]: E1007 19:19:10.602514 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b"
Oct 07 19:19:10 crc kubenswrapper[4813]: E1007 19:19:10.602660 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:19:10 crc kubenswrapper[4813]: E1007 19:19:10.602952 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 19:19:10 crc kubenswrapper[4813]: E1007 19:19:10.603140 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.684522 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.684613 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.684629 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.684652 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.684672 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.787658 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.787715 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.787750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.787773 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.787789 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.891039 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.891101 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.891123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.891153 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.891176 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.995788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.995835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.995858 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.995879 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:10 crc kubenswrapper[4813]: I1007 19:19:10.995894 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:10Z","lastTransitionTime":"2025-10-07T19:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.098256 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.098302 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.098311 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.098339 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.098351 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.200979 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.201014 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.201022 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.201034 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.201043 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.302874 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.302912 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.302943 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.302962 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.302972 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.406316 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.406388 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.406399 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.406417 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.406428 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.516377 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.516448 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.516468 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.516495 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.516516 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.620446 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.620525 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.620637 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.620672 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.620695 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.725654 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.725721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.725740 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.725765 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.725783 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.828800 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.828859 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.828875 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.828900 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.828918 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.933096 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.933160 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.933177 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.933203 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:11 crc kubenswrapper[4813]: I1007 19:19:11.933221 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:11Z","lastTransitionTime":"2025-10-07T19:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.036406 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.036500 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.036525 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.036562 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.036596 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.139845 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.139925 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.139950 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.139978 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.139999 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.242698 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.242784 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.242823 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.242856 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.242876 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.346641 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.346773 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.346805 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.346835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.346858 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.449795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.449904 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.449921 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.449953 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.449969 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.552564 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.552632 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.552649 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.552677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.552693 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.602482 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.602549 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:19:12 crc kubenswrapper[4813]: E1007 19:19:12.602660 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.602679 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:19:12 crc kubenswrapper[4813]: E1007 19:19:12.602789 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.602856 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:19:12 crc kubenswrapper[4813]: E1007 19:19:12.602951 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 19:19:12 crc kubenswrapper[4813]: E1007 19:19:12.603063 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.655140 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.655644 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.655821 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.656016 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.656171 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.758886 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.758929 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.758944 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.758965 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.758978 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.861721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.861755 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.861765 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.861780 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.861790 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.964386 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.964415 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.964425 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.964439 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:12 crc kubenswrapper[4813]: I1007 19:19:12.964450 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:12Z","lastTransitionTime":"2025-10-07T19:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.067296 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.067355 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.067366 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.067382 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.067393 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.169591 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.169660 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.169674 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.169692 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.169703 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.271996 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.272039 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.272047 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.272062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.272072 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.374935 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.375307 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.375497 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.375645 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.375770 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.479509 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.479579 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.479603 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.479635 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.479656 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.582074 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.582116 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.582127 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.582142 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.582153 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.685212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.685274 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.685292 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.685317 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.685362 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.787653 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.787711 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.787724 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.787742 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.787756 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.891013 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.891078 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.891098 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.891125 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.891142 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.994864 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.994952 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.994970 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.994993 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:13 crc kubenswrapper[4813]: I1007 19:19:13.995014 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:13Z","lastTransitionTime":"2025-10-07T19:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.098067 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.098133 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.098151 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.098177 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.098194 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.200749 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.200850 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.200873 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.200897 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.200914 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.304603 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.304651 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.304660 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.304674 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.304683 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.410690 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.411051 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.411227 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.411440 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.411611 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.514865 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.515231 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.515430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.515629 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.515801 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.601594 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.601730 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 07 19:19:14 crc kubenswrapper[4813]: E1007 19:19:14.602468 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.601842 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.601821 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 07 19:19:14 crc kubenswrapper[4813]: E1007 19:19:14.602610 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 07 19:19:14 crc kubenswrapper[4813]: E1007 19:19:14.602740 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 07 19:19:14 crc kubenswrapper[4813]: E1007 19:19:14.602485 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.619360 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.619408 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.619424 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.619447 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.619463 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.628882 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c40493cc-d48f-45d6-b219-ab8078e01723\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5ce26b9e6e70a18e4ae5f7399d611f18f2e79445839a777cd1de99f39f335941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96396ed23910785991f4bd1cc8b91805051715d9ff13c461e86e70ec7e36858\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2028f6bd6540e8aeec593834771e775993d334fbeb6c394ef344bcb0c47c40c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f13b19ee756d3cc0e13275bd67f5fa48406e7a59858238431690cebbad942a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf457416203eb13ebbb3dd2e3a5703d719664d49da9e83b92fbe65786d6ef595\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"file observer\\\\nW1007 19:18:14.107060 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1007 19:18:14.107237 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1007 19:18:14.112151 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-691316833/tls.crt::/tmp/serving-cert-691316833/tls.key\\\\\\\"\\\\nI1007 19:18:14.555705 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1007 19:18:14.561545 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1007 19:18:14.562014 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1007 19:18:14.562043 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1007 19:18:14.562050 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1007 19:18:14.578791 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1007 19:18:14.578816 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578821 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1007 19:18:14.578826 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1007 19:18:14.578831 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1007 19:18:14.578835 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1007 19:18:14.578841 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1007 19:18:14.579015 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1007 19:18:14.580615 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d1b2a7d47f52e235d524d3968572a3e24a81ce05d9fd0e2f29666694ac6501\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ef9e1106273ed879537b3f9a6265e1bc72dc92a0a659a8f8a96dcb588cef4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.651316 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.672129 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714978edd7810bd7816ca609e3005aa9fd4b256f53563bebecfc250392d8b940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90584729d935465d37b5e6182d4126ba83b8958c5f7a1636d74ba9e0ac9e1624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.693842 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.715537 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"537f8a53-dde4-4808-a822-9d8c922a8499\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2edc5ddb595748bb509f8737bdd9e38cbac963c92055f21c63c739aa8560b737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4nmf4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gcfdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.721879 4813 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.721922 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.721939 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.721966 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.721983 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.748479 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b
13c67a269eb7a3b08d2b1202\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:08Z\\\",\\\"message\\\":\\\"val\\\\nI1007 19:19:08.509797 6721 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1007 19:19:08.509811 6721 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1007 19:19:08.509816 6721 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1007 19:19:08.509834 6721 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1007 19:19:08.509855 6721 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1007 19:19:08.509863 6721 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1007 19:19:08.509880 6721 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1007 19:19:08.510200 6721 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1007 19:19:08.510235 6721 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1007 19:19:08.510232 6721 handler.go:208] Removed *v1.Node event handler 7\\\\nI1007 19:19:08.510259 6721 handler.go:208] Removed *v1.Node event handler 2\\\\nI1007 19:19:08.510273 6721 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1007 19:19:08.510336 6721 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1007 19:19:08.510386 6721 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1007 19:19:08.510454 6721 factory.go:656] Stopping watch factory\\\\nI1007 19:19:08.510469 6721 ovnkube.go:599] Stopped ovnkube\\\\nI1007 19:19:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:19:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pmnhk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-vvpdd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.783516 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"549c890f-26b6-42f1-83f5-236434f99d08\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45a004dcec8833f431b0ac11cf296f0026c365b0ca4b3c37d70f8c7e6db0b684\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7035cf478a928e1d72de9c2c
33ee590611cda13c80f8ef31d2523eb769d90ee5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbedb48e19f8cdfdbc4c937cb7cea5046331d1ef696469e2f1c435a2c19fd9f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f509cd0079749aaa75e3c5ac6158924e72fa56bef62159a0450e1952afee21f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0661af94d814d3c48f47479963775e999bce6457bec9bcc374028b753782aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://981b3b98c0fae5a4bb646685dfa9c0c35cbe41b5cc67dc1f1aae55c48dc3f19c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2f816c799940e9634f17f34ddfe2e214eaab731c08b335e21a2b98f3b2b67846\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f261756b9deb39617fc632f369f538fd69c4de2e47e2bb04d1ab4f61bd8af186\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.807238 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9848a8f6-34ef-49f2-8263-067fc9085072\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e02d39587f436f40e81d40db931d149fd6d6790e099902044da5192f4813752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://acbfbbb85bd86493efa2c7e6f3e4d5f6e5ffe61c7697eda6bf571df57e487982\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7f28f45ba70b488f8a76103aa45f9aba3bd2af7244885381a8dcb0a9c04804\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fef18b28698a565d12b79272bdca68d955038d826dc972377bbca8635d88667f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c20215be58b4c0f93e4c219bed64160e04fbb2fe3a1f24b359e5b1b82bbbfa0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://73bab210e0506b947c97a19041a1c18542ec638bc11f43cc3d7659c2c3695b19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf24b8bd62591b01e8217d6d1ae04fd181a386c89f71942af3bea1747e461e78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:18:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qct6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-vhdcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.825250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.825298 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:14 crc 
kubenswrapper[4813]: I1007 19:19:14.825315 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.825364 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.825383 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.829978 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbxzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76e24ee5-81b1-4538-aca5-141e399e32e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-07T19:19:02Z\\\",\\\"message\\\":\\\"2025-10-07T19:18:17+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773\\\\n2025-10-07T19:18:17+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d337092d-e487-4702-aafb-d552edda5773 to /host/opt/cni/bin/\\\\n2025-10-07T19:18:17Z [verbose] multus-daemon started\\\\n2025-10-07T19:18:17Z [verbose] Readiness Indicator file check\\\\n2025-10-07T19:19:02Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d7mnh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbxzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.848369 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-w6x2v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ea96b97-cd23-4821-9abd-abde3204afbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89a4ccf8a1fb8dec2d15db05921a4fae95a09f9852fd9635f9442ae44ae565ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-25glw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:19Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-w6x2v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.868008 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb035bab-3255-49c1-85e9-77c3fb41abdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc343773778d8a3dea67bb881f8e1b51c628b7c0086f8dd084a45ac975000468\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c25768cfebf079175de443191f4141ddeb26676cb0007b2367359242a68f4a12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9f28d29686c4483d7af3a0f24aa235ed47d14f05cd41dc14697e3d1a0b25a30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e938416192af016c8a751fb5c5626c16b4f68ea0ff54a305e8b4e49f5427e6fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.888898 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"358c946b-7402-489d-90c0-d8a7128d22e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://197070f94e3d96cd08d6a81a80f94f127a2c9ae6fadebb414003597782221f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c9c86aba890567720bc6ceb6c221f41e1470a9c13022c88ec740028922bb6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4da439867d505bfa036880c83e2dd958824094bb9b2ffcf85421e1de400d5e8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f22fe524c6ca48b09fe9c7401d55622d1f21154d545e7900e0786b5deeb2926\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.909557 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d5f7d58-e213-4c3e-a36a-c7603a593bbe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2847456f4876d0bf0ad8063c23a6fadb6ccc2cf015cc34ed080b32c2b13a8593\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://704075a2a59a9eb43e7ae894ff4d19de7a25543a4a22c3c0d1e07545f5092153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gdv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:27Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fpxxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 
19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.926851 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:28Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwtb2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:28Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nz8v5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.928142 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.928195 4813 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.928212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.928234 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.928251 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:14Z","lastTransitionTime":"2025-10-07T19:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.948483 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"52d4b6b1-572a-43b1-8453-4381a7c4872a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:17:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a38d5db77ffa8a2032673e8d879927c1d10d9bdc91969b1c58b7ca1583fda848\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:17:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e095b845cf48b87ddbd052291f606128d827fdfce6a535afdff9e00635229f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e095b845cf48b87ddbd052291f606128d827fdfce6a535afdff9e00635229f50\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-07T19:17:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-07T19:17:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:17:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.969756 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:14 crc kubenswrapper[4813]: I1007 19:19:14.996225 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://460118e518546bde40cd5dd31806e350ca381ae2a97cde881539f2066e114290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:14Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.015505 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-j4kjg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f693a3c5-0ae5-4bf9-9e60-027f67537f9a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5704ec8b5d74e254121700f31a83eb1935693a41a503bfee3987f794b7ec0237\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9cr8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-07T19:18:14Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-j4kjg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:15Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.030166 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.030195 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.030206 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.030222 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.030233 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.034381 4813 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-07T19:18:17Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://67da5314502c5a64262ec9b96ff0ef0da5dcf793bbb6a35110b6254dc6f6bf56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-07T19:18:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:15Z is after 2025-08-24T17:21:41Z" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.133460 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.133578 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.133595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.133622 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.133640 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.237096 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.237165 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.237184 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.237210 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.237226 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.341480 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.341910 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.341928 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.341954 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.341975 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.445002 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.445058 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.445112 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.445137 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.445156 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.549374 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.549423 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.549441 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.549521 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.550049 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.554595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.554654 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.554671 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.554693 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.554710 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: E1007 19:19:15.576173 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:15Z is after 
2025-08-24T17:21:41Z" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.581454 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.581516 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.581533 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.581558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.581575 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: E1007 19:19:15.602297 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:15Z is after 
2025-08-24T17:21:41Z" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.607728 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.607789 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.607814 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.607844 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.607867 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: E1007 19:19:15.628842 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:15Z is after 
2025-08-24T17:21:41Z" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.634718 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.634769 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.634786 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.634812 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.634830 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: E1007 19:19:15.655530 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:15Z is after 
2025-08-24T17:21:41Z" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.660683 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.660734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.660750 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.660778 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.660799 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: E1007 19:19:15.682357 4813 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148052Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608852Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-07T19:19:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a5d0af03-2f8a-4e5c-98e8-1fba456b2042\\\",\\\"systemUUID\\\":\\\"2045744e-c123-49af-abd0-32768e95c3cd\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-07T19:19:15Z is after 
2025-08-24T17:21:41Z" Oct 07 19:19:15 crc kubenswrapper[4813]: E1007 19:19:15.682588 4813 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.684792 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.684853 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.684874 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.684902 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.684923 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.788477 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.788550 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.788572 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.788607 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.788630 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.891691 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.891763 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.891774 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.891790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.891801 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.994957 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.995075 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.995102 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.995131 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:15 crc kubenswrapper[4813]: I1007 19:19:15.995154 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:15Z","lastTransitionTime":"2025-10-07T19:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.098026 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.098130 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.098185 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.098213 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.098245 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.200701 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.200749 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.200765 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.200788 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.200806 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.303732 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.303798 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.303816 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.303847 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.303868 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.408150 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.408212 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.408232 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.408257 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.408274 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.511481 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.511677 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.511744 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.511772 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.511789 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.602308 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.602583 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.602913 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.602944 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:16 crc kubenswrapper[4813]: E1007 19:19:16.602874 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:16 crc kubenswrapper[4813]: E1007 19:19:16.603112 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:16 crc kubenswrapper[4813]: E1007 19:19:16.603293 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
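[editor's note: the node-status update above fails because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate whose NotAfter (2025-08-24T17:21:41Z) is before the current time (2025-10-07T19:19:15Z). Below is a minimal, stdlib-only sketch of the same NotBefore/NotAfter validity check that Go's TLS stack performs; the PEM file path is a placeholder, not taken from this log.]

// certcheck.go - sketch of the x509 validity-window check behind the
// "certificate has expired or is not yet valid" error above.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("/tmp/webhook-serving-cert.pem") // hypothetical path
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n", cert.NotBefore, cert.NotAfter, now)
	// This is the comparison that produced the log message: current time
	// 2025-10-07T19:19:15Z is after NotAfter 2025-08-24T17:21:41Z.
	switch {
	case now.After(cert.NotAfter):
		fmt.Println("certificate has expired")
	case now.Before(cert.NotBefore):
		fmt.Println("certificate is not yet valid")
	default:
		fmt.Println("certificate is within its validity window")
	}
}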
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:16 crc kubenswrapper[4813]: E1007 19:19:16.603441 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.614495 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.614541 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.614560 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.614585 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.614604 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.717892 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.717960 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.717984 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.718014 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.718038 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.821978 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.822041 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.822087 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.822119 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.822138 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.933618 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.933674 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.933686 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.933703 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:16 crc kubenswrapper[4813]: I1007 19:19:16.933715 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:16Z","lastTransitionTime":"2025-10-07T19:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.036004 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.036080 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.036098 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.036125 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.036142 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.138568 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.138635 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.138659 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.138690 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.138712 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.241899 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.242200 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.242291 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.242430 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.242525 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.375904 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.376357 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.376508 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.376648 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.376793 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.480152 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.480201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.480217 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.480241 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.480259 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.583070 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.583130 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.583147 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.583171 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.583188 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.686192 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.686302 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.686321 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.686373 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.686395 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.789767 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.789900 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.789925 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.789959 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.789982 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.892846 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.892900 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.892917 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.892946 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.892965 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.996687 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.996786 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.996807 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.996834 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:17 crc kubenswrapper[4813]: I1007 19:19:17.996856 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:17Z","lastTransitionTime":"2025-10-07T19:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.099618 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.099668 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.099682 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.099700 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.099713 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.202973 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.203368 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.203558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.203721 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.203872 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.306755 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.306826 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.306850 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.306876 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.306896 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
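[editor's note: the NotReady condition is driven by the runtime reporting NetworkReady=false until a CNI configuration appears in /etc/kubernetes/cni/net.d/; on this cluster that file is written once the network operator (OVN-Kubernetes/Multus) starts. Below is a rough stdlib-only sketch of such a directory scan for illustration; it is not the CRI-O or kubelet source.]

// cnicheck.go - toy model of the readiness test behind "no CNI configuration
// file in /etc/kubernetes/cni/net.d/": not ready until a config file exists.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: %v\n", err)
		return
	}
	for _, e := range entries {
		// Typical CNI config extensions; the real loader also parses contents.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Printf("NetworkReady=true: found %s\n", e.Name())
			return
		}
	}
	fmt.Println("NetworkReady=false: no CNI configuration file found; has your network provider started?")
}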
Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.380747 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.380935 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.380994 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.381317 4813 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.381462 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:20:22.381430999 +0000 UTC m=+148.459686661 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.381585 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:22.381561113 +0000 UTC m=+148.459816764 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.381661 4813 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.381706 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-07 19:20:22.381693237 +0000 UTC m=+148.459948878 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.409921 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.409976 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.409997 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.410029 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.410052 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
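[editor's note: the "No retries permitted until ... (durationBeforeRetry 1m4s)" entries show the volume operation executor's exponential backoff. The sketch below reproduces the pattern under assumed constants (a 500 ms base delay doubled per failure, capped near two minutes); with those assumptions, 1m4s corresponds to the 7th consecutive failure. The real constants live in the kubelet's backoff code and may differ.]

// backoff.go - illustrative exponential backoff matching the observed 1m4s.
package main

import (
	"fmt"
	"time"
)

// durationBeforeRetry returns the delay after `failures` consecutive errors.
func durationBeforeRetry(failures int) time.Duration {
	const (
		base     = 500 * time.Millisecond // assumed initial delay
		maxDelay = 2 * time.Minute        // assumed cap
	)
	delay := base
	for i := 0; i < failures; i++ {
		delay *= 2
		if delay > maxDelay {
			return maxDelay
		}
	}
	return delay
}

func main() {
	for f := 0; f <= 8; f++ {
		fmt.Printf("failure %d -> retry in %v\n", f, durationBeforeRetry(f))
	}
	// With these assumed constants, failure 7 yields 1m4s, the value seen in
	// the nestedpendingoperations.go entries above.
}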
Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.481858 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.481915 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.482123 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.482163 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.482183 4813 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.482250 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-07 19:20:22.482229035 +0000 UTC m=+148.560484686 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.482133 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.482290 4813 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.482315 4813 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.482416 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-07 19:20:22.48239738 +0000 UTC m=+148.560653031 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.512787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.512835 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.512851 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.512874 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.512890 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.602054 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.602264 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.602486 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.602619 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.602681 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.602835 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.603511 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:18 crc kubenswrapper[4813]: E1007 19:19:18.603692 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.614824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.614879 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.614896 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.614920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.614940 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.717427 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.717500 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.717523 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.717555 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.717578 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.820831 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.820911 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.820935 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.820965 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.821026 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.924151 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.924222 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.924250 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.924282 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:18 crc kubenswrapper[4813]: I1007 19:19:18.924306 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:18Z","lastTransitionTime":"2025-10-07T19:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.027502 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.027568 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.027591 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.027621 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.027643 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.130653 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.130736 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.130759 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.130787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.130810 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.234671 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.234739 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.234760 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.234789 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.234811 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.337471 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.337533 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.337561 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.337591 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.337613 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.440692 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.440768 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.440790 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.440821 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.440844 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.544028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.544085 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.544101 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.544123 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.544139 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.647380 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.647524 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.647549 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.647577 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.647598 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.750987 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.751053 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.751070 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.751094 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.751147 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.854698 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.855081 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.855098 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.855122 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.855139 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.957894 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.957952 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.957973 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.958000 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:19 crc kubenswrapper[4813]: I1007 19:19:19.958021 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:19Z","lastTransitionTime":"2025-10-07T19:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.060919 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.061186 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.061275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.061375 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.061442 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.163795 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.163824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.163833 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.163848 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.163858 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.266970 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.267033 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.267055 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.267084 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.267104 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.370578 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.370648 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.370669 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.370697 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.370718 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.474127 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.474187 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.474203 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.474229 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.474248 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.577482 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.577536 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.577554 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.577579 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.577597 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.602301 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.602371 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.602424 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.602378 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:20 crc kubenswrapper[4813]: E1007 19:19:20.602509 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:20 crc kubenswrapper[4813]: E1007 19:19:20.602636 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:20 crc kubenswrapper[4813]: E1007 19:19:20.602720 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:20 crc kubenswrapper[4813]: E1007 19:19:20.602830 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.680103 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.680671 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.680832 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.680981 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.681129 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.785029 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.785092 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.785109 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.785135 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.785153 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.887891 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.887997 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.888039 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.888071 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.888094 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.991353 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.991414 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.991429 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.991454 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:20 crc kubenswrapper[4813]: I1007 19:19:20.991477 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:20Z","lastTransitionTime":"2025-10-07T19:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.095167 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.095231 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.095249 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.095273 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.095291 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.197660 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.197691 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.197699 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.197714 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.197724 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.300870 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.300938 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.300962 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.300988 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.301010 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.403408 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.403470 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.403488 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.403513 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.403531 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.507378 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.507446 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.507468 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.507493 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.507509 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.610580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.611603 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.611769 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.611923 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.612096 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.715546 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.715920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.716108 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.716371 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.716623 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.819979 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.820399 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.820626 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.821035 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.821279 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.925761 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.925824 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.925843 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.925867 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:21 crc kubenswrapper[4813]: I1007 19:19:21.925884 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:21Z","lastTransitionTime":"2025-10-07T19:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.028356 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.028398 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.028406 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.028421 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.028433 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.131412 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.131771 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.131968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.132201 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.132385 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.235147 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.235239 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.235260 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.235286 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.235302 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.339085 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.339150 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.339168 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.339198 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.339215 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.441787 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.441832 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.441844 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.441862 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.441874 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.544863 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.544927 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.544979 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.545010 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.545032 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.602567 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.602653 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:22 crc kubenswrapper[4813]: E1007 19:19:22.602765 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.602817 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:22 crc kubenswrapper[4813]: E1007 19:19:22.602894 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.603006 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:22 crc kubenswrapper[4813]: E1007 19:19:22.603751 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:22 crc kubenswrapper[4813]: E1007 19:19:22.604102 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.648210 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.648275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.648292 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.648314 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.648361 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.750604 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.750706 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.750729 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.750762 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.750785 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.853347 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.853730 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.853841 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.853950 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.854052 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.956947 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.957004 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.957021 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.957045 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:22 crc kubenswrapper[4813]: I1007 19:19:22.957062 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:22Z","lastTransitionTime":"2025-10-07T19:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.059557 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.059823 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.059942 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.060027 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.060107 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.162447 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.162509 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.162531 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.162558 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.162579 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.265532 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.265738 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.265836 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.265902 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.265969 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.369145 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.369194 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.369210 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.369232 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.369249 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.472275 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.472585 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.472691 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.472769 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.472834 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.576009 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.576089 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.576108 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.576620 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.576677 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.680108 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.680178 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.680200 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.680226 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.680245 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.783095 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.783580 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.783783 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.784093 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.784406 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.887294 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.887385 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.887408 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.887436 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.887457 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.990363 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.990432 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.990453 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.990480 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:23 crc kubenswrapper[4813]: I1007 19:19:23.990497 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:23Z","lastTransitionTime":"2025-10-07T19:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.093920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.093989 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.094007 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.094030 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.094046 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.196400 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.196447 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.196465 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.196489 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.196507 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.299483 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.299546 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.299572 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.299603 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.299624 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.403719 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.403785 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.403808 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.403836 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.403857 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.506946 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.506990 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.507000 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.507020 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.507031 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.602155 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:24 crc kubenswrapper[4813]: E1007 19:19:24.602282 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.602356 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.602381 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.602818 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:24 crc kubenswrapper[4813]: E1007 19:19:24.603052 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:24 crc kubenswrapper[4813]: E1007 19:19:24.603114 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:24 crc kubenswrapper[4813]: E1007 19:19:24.603312 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.603684 4813 scope.go:117] "RemoveContainer" containerID="6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202" Oct 07 19:19:24 crc kubenswrapper[4813]: E1007 19:19:24.604052 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.608309 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.608351 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.608359 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.608370 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.608379 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.648258 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=37.648231826 podStartE2EDuration="37.648231826s" podCreationTimestamp="2025-10-07 19:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.627093744 +0000 UTC m=+90.705349355" watchObservedRunningTime="2025-10-07 19:19:24.648231826 +0000 UTC m=+90.726487477" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.648976 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=67.648963106 podStartE2EDuration="1m7.648963106s" podCreationTimestamp="2025-10-07 19:18:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.647383483 +0000 UTC m=+90.725639144" watchObservedRunningTime="2025-10-07 19:19:24.648963106 +0000 UTC m=+90.727218757" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.680017 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fpxxn" podStartSLOduration=69.679987121 podStartE2EDuration="1m9.679987121s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.664121364 +0000 UTC m=+90.742377025" watchObservedRunningTime="2025-10-07 19:19:24.679987121 +0000 UTC m=+90.758242772" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.705280 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=15.705247526 podStartE2EDuration="15.705247526s" podCreationTimestamp="2025-10-07 19:19:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.698209292 +0000 UTC m=+90.776464953" watchObservedRunningTime="2025-10-07 19:19:24.705247526 +0000 UTC m=+90.783503147" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.711902 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.711948 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.712468 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.712511 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.712558 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.770975 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-j4kjg" podStartSLOduration=70.770953136 podStartE2EDuration="1m10.770953136s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.75549315 +0000 UTC m=+90.833748771" watchObservedRunningTime="2025-10-07 19:19:24.770953136 +0000 UTC m=+90.849208757" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.808104 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=70.808080898 podStartE2EDuration="1m10.808080898s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.792146189 +0000 UTC m=+90.870401830" watchObservedRunningTime="2025-10-07 19:19:24.808080898 +0000 UTC m=+90.886336519" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.814447 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.814506 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.814523 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.814547 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.814563 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.848231 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podStartSLOduration=70.848206063 podStartE2EDuration="1m10.848206063s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.848171322 +0000 UTC m=+90.926426953" watchObservedRunningTime="2025-10-07 19:19:24.848206063 +0000 UTC m=+90.926461704" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.916349 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.916386 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.916397 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.916414 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.916427 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:24Z","lastTransitionTime":"2025-10-07T19:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.919426 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=68.919412544 podStartE2EDuration="1m8.919412544s" podCreationTimestamp="2025-10-07 19:18:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.917985885 +0000 UTC m=+90.996241506" watchObservedRunningTime="2025-10-07 19:19:24.919412544 +0000 UTC m=+90.997668175" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.941157 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-vhdcn" podStartSLOduration=70.941132572 podStartE2EDuration="1m10.941132572s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.940882095 +0000 UTC m=+91.019137716" watchObservedRunningTime="2025-10-07 19:19:24.941132572 +0000 UTC m=+91.019388223" Oct 07 19:19:24 crc kubenswrapper[4813]: I1007 19:19:24.957709 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-gbxzg" podStartSLOduration=70.957692038 podStartE2EDuration="1m10.957692038s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.95737762 +0000 UTC m=+91.035633271" watchObservedRunningTime="2025-10-07 19:19:24.957692038 +0000 UTC m=+91.035947659" Oct 07 19:19:24 crc kubenswrapper[4813]: 
I1007 19:19:24.973935 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-w6x2v" podStartSLOduration=69.973916735 podStartE2EDuration="1m9.973916735s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:24.973764691 +0000 UTC m=+91.052020312" watchObservedRunningTime="2025-10-07 19:19:24.973916735 +0000 UTC m=+91.052172356" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.019015 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.019057 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.019071 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.019087 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.019099 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.121968 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.122028 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.122045 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.122070 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.122088 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.225215 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.225269 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.225285 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.225307 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.225353 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.328920 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.329018 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.329038 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.329062 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.329079 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.432595 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.432648 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.432670 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.432700 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.432722 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.536667 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.536728 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.536744 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.536770 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.536786 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.639841 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.639955 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.639976 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.640002 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.640019 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.742643 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.742756 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.742814 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.742840 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.742858 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.845734 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.845800 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.845818 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.845841 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.845858 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.927460 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.927523 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.927540 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.927565 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.927581 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.955221 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.955273 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.955291 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.955313 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 07 19:19:25 crc kubenswrapper[4813]: I1007 19:19:25.955353 4813 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-07T19:19:25Z","lastTransitionTime":"2025-10-07T19:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.001991 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz"] Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.002565 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.005554 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.005656 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.006763 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.007363 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.069546 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5ae8a1f4-efc6-400a-8eff-519e69027fbf-service-ca\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.069593 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5ae8a1f4-efc6-400a-8eff-519e69027fbf-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.069640 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5ae8a1f4-efc6-400a-8eff-519e69027fbf-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.069673 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ae8a1f4-efc6-400a-8eff-519e69027fbf-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.069748 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ae8a1f4-efc6-400a-8eff-519e69027fbf-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.171167 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ae8a1f4-efc6-400a-8eff-519e69027fbf-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.171240 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ae8a1f4-efc6-400a-8eff-519e69027fbf-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.171275 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5ae8a1f4-efc6-400a-8eff-519e69027fbf-service-ca\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.171299 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5ae8a1f4-efc6-400a-8eff-519e69027fbf-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.171368 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5ae8a1f4-efc6-400a-8eff-519e69027fbf-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.171455 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5ae8a1f4-efc6-400a-8eff-519e69027fbf-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.171539 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5ae8a1f4-efc6-400a-8eff-519e69027fbf-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.172893 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5ae8a1f4-efc6-400a-8eff-519e69027fbf-service-ca\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.181872 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5ae8a1f4-efc6-400a-8eff-519e69027fbf-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.198884 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ae8a1f4-efc6-400a-8eff-519e69027fbf-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-p9gcz\" (UID: \"5ae8a1f4-efc6-400a-8eff-519e69027fbf\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.325106 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.602164 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.602302 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.602426 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:26 crc kubenswrapper[4813]: E1007 19:19:26.602582 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:26 crc kubenswrapper[4813]: I1007 19:19:26.602187 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:26 crc kubenswrapper[4813]: E1007 19:19:26.602968 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:26 crc kubenswrapper[4813]: E1007 19:19:26.603106 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:26 crc kubenswrapper[4813]: E1007 19:19:26.603862 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:27 crc kubenswrapper[4813]: I1007 19:19:27.190795 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" event={"ID":"5ae8a1f4-efc6-400a-8eff-519e69027fbf","Type":"ContainerStarted","Data":"1116585aa292835eb373489e16282cca352705909d9371db1991a434e5868bac"} Oct 07 19:19:27 crc kubenswrapper[4813]: I1007 19:19:27.190871 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" event={"ID":"5ae8a1f4-efc6-400a-8eff-519e69027fbf","Type":"ContainerStarted","Data":"d122c08130eb275bbd5756601b99804466cefcf0a5e602cee0df181057e935a0"} Oct 07 19:19:27 crc kubenswrapper[4813]: I1007 19:19:27.210380 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-p9gcz" podStartSLOduration=73.210356103 podStartE2EDuration="1m13.210356103s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:27.209699904 +0000 UTC m=+93.287955556" watchObservedRunningTime="2025-10-07 19:19:27.210356103 +0000 UTC m=+93.288611754" Oct 07 19:19:28 crc kubenswrapper[4813]: I1007 19:19:28.601573 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:28 crc kubenswrapper[4813]: I1007 19:19:28.601628 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:28 crc kubenswrapper[4813]: I1007 19:19:28.601628 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:28 crc kubenswrapper[4813]: I1007 19:19:28.601764 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:28 crc kubenswrapper[4813]: E1007 19:19:28.601912 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:28 crc kubenswrapper[4813]: E1007 19:19:28.602088 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:28 crc kubenswrapper[4813]: E1007 19:19:28.602193 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:28 crc kubenswrapper[4813]: E1007 19:19:28.602262 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:30 crc kubenswrapper[4813]: I1007 19:19:30.601607 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:30 crc kubenswrapper[4813]: I1007 19:19:30.601629 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:30 crc kubenswrapper[4813]: E1007 19:19:30.602161 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:30 crc kubenswrapper[4813]: I1007 19:19:30.601764 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:30 crc kubenswrapper[4813]: I1007 19:19:30.601713 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:30 crc kubenswrapper[4813]: E1007 19:19:30.602302 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:30 crc kubenswrapper[4813]: E1007 19:19:30.602468 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:30 crc kubenswrapper[4813]: E1007 19:19:30.602611 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:32 crc kubenswrapper[4813]: I1007 19:19:32.601588 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:32 crc kubenswrapper[4813]: I1007 19:19:32.601642 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:32 crc kubenswrapper[4813]: E1007 19:19:32.601784 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:32 crc kubenswrapper[4813]: I1007 19:19:32.601810 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:32 crc kubenswrapper[4813]: I1007 19:19:32.601884 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:32 crc kubenswrapper[4813]: E1007 19:19:32.602124 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:32 crc kubenswrapper[4813]: E1007 19:19:32.602766 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:32 crc kubenswrapper[4813]: E1007 19:19:32.602647 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:32 crc kubenswrapper[4813]: I1007 19:19:32.643418 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:32 crc kubenswrapper[4813]: E1007 19:19:32.643630 4813 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:19:32 crc kubenswrapper[4813]: E1007 19:19:32.643746 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs podName:c8c05824-c5ea-44b7-bd35-0c7d6561a61b nodeName:}" failed. 
No retries permitted until 2025-10-07 19:20:36.643719188 +0000 UTC m=+162.721974829 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs") pod "network-metrics-daemon-nz8v5" (UID: "c8c05824-c5ea-44b7-bd35-0c7d6561a61b") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 07 19:19:34 crc kubenswrapper[4813]: I1007 19:19:34.601648 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:34 crc kubenswrapper[4813]: I1007 19:19:34.603746 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:34 crc kubenswrapper[4813]: E1007 19:19:34.603755 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:34 crc kubenswrapper[4813]: I1007 19:19:34.603793 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:34 crc kubenswrapper[4813]: E1007 19:19:34.604035 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:34 crc kubenswrapper[4813]: I1007 19:19:34.604290 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:34 crc kubenswrapper[4813]: E1007 19:19:34.604489 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:34 crc kubenswrapper[4813]: E1007 19:19:34.605103 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:36 crc kubenswrapper[4813]: I1007 19:19:36.602095 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:36 crc kubenswrapper[4813]: I1007 19:19:36.602131 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:36 crc kubenswrapper[4813]: E1007 19:19:36.602247 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:36 crc kubenswrapper[4813]: I1007 19:19:36.602507 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:36 crc kubenswrapper[4813]: E1007 19:19:36.602599 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:36 crc kubenswrapper[4813]: E1007 19:19:36.602775 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:36 crc kubenswrapper[4813]: I1007 19:19:36.603288 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:36 crc kubenswrapper[4813]: E1007 19:19:36.603584 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:38 crc kubenswrapper[4813]: I1007 19:19:38.603495 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:38 crc kubenswrapper[4813]: I1007 19:19:38.603561 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:38 crc kubenswrapper[4813]: I1007 19:19:38.604625 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:38 crc kubenswrapper[4813]: I1007 19:19:38.604966 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:38 crc kubenswrapper[4813]: E1007 19:19:38.606320 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:38 crc kubenswrapper[4813]: E1007 19:19:38.605116 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:38 crc kubenswrapper[4813]: E1007 19:19:38.605510 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:38 crc kubenswrapper[4813]: I1007 19:19:38.606182 4813 scope.go:117] "RemoveContainer" containerID="6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202" Oct 07 19:19:38 crc kubenswrapper[4813]: E1007 19:19:38.607090 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-vvpdd_openshift-ovn-kubernetes(e3bc364e-ed17-44b8-9942-b41e6b8ac13a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" Oct 07 19:19:38 crc kubenswrapper[4813]: E1007 19:19:38.604951 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:40 crc kubenswrapper[4813]: I1007 19:19:40.601963 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:40 crc kubenswrapper[4813]: I1007 19:19:40.601963 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:40 crc kubenswrapper[4813]: E1007 19:19:40.602174 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:40 crc kubenswrapper[4813]: I1007 19:19:40.601991 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:40 crc kubenswrapper[4813]: I1007 19:19:40.602236 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:40 crc kubenswrapper[4813]: E1007 19:19:40.602388 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:40 crc kubenswrapper[4813]: E1007 19:19:40.602480 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:40 crc kubenswrapper[4813]: E1007 19:19:40.602554 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:42 crc kubenswrapper[4813]: I1007 19:19:42.601935 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:42 crc kubenswrapper[4813]: I1007 19:19:42.601960 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:42 crc kubenswrapper[4813]: I1007 19:19:42.601942 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:42 crc kubenswrapper[4813]: E1007 19:19:42.602070 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:42 crc kubenswrapper[4813]: I1007 19:19:42.601964 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:42 crc kubenswrapper[4813]: E1007 19:19:42.602176 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:42 crc kubenswrapper[4813]: E1007 19:19:42.602437 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:42 crc kubenswrapper[4813]: E1007 19:19:42.603036 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:44 crc kubenswrapper[4813]: I1007 19:19:44.601711 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:44 crc kubenswrapper[4813]: I1007 19:19:44.601869 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:44 crc kubenswrapper[4813]: I1007 19:19:44.601871 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:44 crc kubenswrapper[4813]: I1007 19:19:44.601941 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:44 crc kubenswrapper[4813]: E1007 19:19:44.602793 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:44 crc kubenswrapper[4813]: E1007 19:19:44.602986 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:44 crc kubenswrapper[4813]: E1007 19:19:44.603244 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:44 crc kubenswrapper[4813]: E1007 19:19:44.603438 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:46 crc kubenswrapper[4813]: I1007 19:19:46.601776 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:46 crc kubenswrapper[4813]: I1007 19:19:46.601959 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:46 crc kubenswrapper[4813]: E1007 19:19:46.602126 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:46 crc kubenswrapper[4813]: I1007 19:19:46.602450 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:46 crc kubenswrapper[4813]: E1007 19:19:46.602545 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:46 crc kubenswrapper[4813]: E1007 19:19:46.602807 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:46 crc kubenswrapper[4813]: I1007 19:19:46.603054 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:46 crc kubenswrapper[4813]: E1007 19:19:46.603208 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:48 crc kubenswrapper[4813]: I1007 19:19:48.602013 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:48 crc kubenswrapper[4813]: I1007 19:19:48.602367 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:48 crc kubenswrapper[4813]: E1007 19:19:48.603683 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:48 crc kubenswrapper[4813]: I1007 19:19:48.602447 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:48 crc kubenswrapper[4813]: E1007 19:19:48.603947 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:48 crc kubenswrapper[4813]: E1007 19:19:48.603358 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:48 crc kubenswrapper[4813]: I1007 19:19:48.602418 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:48 crc kubenswrapper[4813]: E1007 19:19:48.604158 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:49 crc kubenswrapper[4813]: I1007 19:19:49.280868 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/1.log" Oct 07 19:19:49 crc kubenswrapper[4813]: I1007 19:19:49.281598 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/0.log" Oct 07 19:19:49 crc kubenswrapper[4813]: I1007 19:19:49.281651 4813 generic.go:334] "Generic (PLEG): container finished" podID="76e24ee5-81b1-4538-aca5-141e399e32e9" containerID="42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9" exitCode=1 Oct 07 19:19:49 crc kubenswrapper[4813]: I1007 19:19:49.281692 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbxzg" event={"ID":"76e24ee5-81b1-4538-aca5-141e399e32e9","Type":"ContainerDied","Data":"42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9"} Oct 07 19:19:49 crc kubenswrapper[4813]: I1007 19:19:49.281736 4813 scope.go:117] "RemoveContainer" containerID="6bd6dc6818987338881abbaba0a871ee6a5bda263a5177586d18b571732ffe97" Oct 07 19:19:49 crc kubenswrapper[4813]: I1007 19:19:49.282241 4813 scope.go:117] "RemoveContainer" containerID="42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9" Oct 07 19:19:49 crc kubenswrapper[4813]: E1007 19:19:49.282495 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-gbxzg_openshift-multus(76e24ee5-81b1-4538-aca5-141e399e32e9)\"" pod="openshift-multus/multus-gbxzg" podUID="76e24ee5-81b1-4538-aca5-141e399e32e9" Oct 07 19:19:50 crc kubenswrapper[4813]: I1007 19:19:50.288258 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/1.log" Oct 07 19:19:50 crc kubenswrapper[4813]: I1007 19:19:50.601759 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:50 crc kubenswrapper[4813]: I1007 19:19:50.601840 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:50 crc kubenswrapper[4813]: E1007 19:19:50.602180 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:50 crc kubenswrapper[4813]: I1007 19:19:50.602231 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:50 crc kubenswrapper[4813]: E1007 19:19:50.602377 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:50 crc kubenswrapper[4813]: E1007 19:19:50.602628 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:50 crc kubenswrapper[4813]: I1007 19:19:50.602790 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:50 crc kubenswrapper[4813]: E1007 19:19:50.602941 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:51 crc kubenswrapper[4813]: I1007 19:19:51.602956 4813 scope.go:117] "RemoveContainer" containerID="6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202" Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.297019 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/3.log" Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.300046 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerStarted","Data":"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9"} Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.300479 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.326351 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podStartSLOduration=98.326310653 podStartE2EDuration="1m38.326310653s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:19:52.325715136 +0000 UTC m=+118.403970747" watchObservedRunningTime="2025-10-07 19:19:52.326310653 +0000 UTC m=+118.404566264" Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.602050 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.602066 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:52 crc kubenswrapper[4813]: E1007 19:19:52.602209 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.602089 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.602079 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:52 crc kubenswrapper[4813]: E1007 19:19:52.602387 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:52 crc kubenswrapper[4813]: E1007 19:19:52.602466 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:52 crc kubenswrapper[4813]: E1007 19:19:52.602564 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:52 crc kubenswrapper[4813]: I1007 19:19:52.656115 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nz8v5"] Oct 07 19:19:53 crc kubenswrapper[4813]: I1007 19:19:53.305406 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:53 crc kubenswrapper[4813]: E1007 19:19:53.305594 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:54 crc kubenswrapper[4813]: E1007 19:19:54.575709 4813 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 07 19:19:54 crc kubenswrapper[4813]: I1007 19:19:54.601768 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:54 crc kubenswrapper[4813]: I1007 19:19:54.601815 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:54 crc kubenswrapper[4813]: I1007 19:19:54.601891 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:54 crc kubenswrapper[4813]: E1007 19:19:54.604186 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:54 crc kubenswrapper[4813]: E1007 19:19:54.604457 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:54 crc kubenswrapper[4813]: E1007 19:19:54.604556 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:54 crc kubenswrapper[4813]: E1007 19:19:54.702130 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 07 19:19:55 crc kubenswrapper[4813]: I1007 19:19:55.602503 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:55 crc kubenswrapper[4813]: E1007 19:19:55.602956 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:56 crc kubenswrapper[4813]: I1007 19:19:56.602302 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:56 crc kubenswrapper[4813]: I1007 19:19:56.602405 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:56 crc kubenswrapper[4813]: E1007 19:19:56.602514 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:56 crc kubenswrapper[4813]: I1007 19:19:56.602604 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:56 crc kubenswrapper[4813]: E1007 19:19:56.602772 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:56 crc kubenswrapper[4813]: E1007 19:19:56.602847 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:57 crc kubenswrapper[4813]: I1007 19:19:57.601585 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:57 crc kubenswrapper[4813]: E1007 19:19:57.601795 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:58 crc kubenswrapper[4813]: I1007 19:19:58.601849 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:19:58 crc kubenswrapper[4813]: I1007 19:19:58.601930 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:19:58 crc kubenswrapper[4813]: I1007 19:19:58.601883 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:19:58 crc kubenswrapper[4813]: E1007 19:19:58.602050 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:19:58 crc kubenswrapper[4813]: E1007 19:19:58.602201 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:19:58 crc kubenswrapper[4813]: E1007 19:19:58.602393 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:19:59 crc kubenswrapper[4813]: I1007 19:19:59.601833 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:19:59 crc kubenswrapper[4813]: E1007 19:19:59.602049 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:19:59 crc kubenswrapper[4813]: I1007 19:19:59.602680 4813 scope.go:117] "RemoveContainer" containerID="42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9" Oct 07 19:19:59 crc kubenswrapper[4813]: E1007 19:19:59.703785 4813 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 07 19:20:00 crc kubenswrapper[4813]: I1007 19:20:00.333003 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/1.log" Oct 07 19:20:00 crc kubenswrapper[4813]: I1007 19:20:00.333101 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbxzg" event={"ID":"76e24ee5-81b1-4538-aca5-141e399e32e9","Type":"ContainerStarted","Data":"ea12b0917e0500eb874905c967fd2d8059d5c17db195a77e9f1face4a7c3548c"} Oct 07 19:20:00 crc kubenswrapper[4813]: I1007 19:20:00.567657 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:20:00 crc kubenswrapper[4813]: I1007 19:20:00.602455 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:00 crc kubenswrapper[4813]: I1007 19:20:00.602471 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:20:00 crc kubenswrapper[4813]: E1007 19:20:00.602574 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:20:00 crc kubenswrapper[4813]: E1007 19:20:00.602904 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:20:00 crc kubenswrapper[4813]: I1007 19:20:00.602695 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:20:00 crc kubenswrapper[4813]: E1007 19:20:00.603182 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:20:01 crc kubenswrapper[4813]: I1007 19:20:01.601964 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:20:01 crc kubenswrapper[4813]: E1007 19:20:01.602460 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:20:02 crc kubenswrapper[4813]: I1007 19:20:02.603601 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:20:02 crc kubenswrapper[4813]: E1007 19:20:02.603688 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:20:02 crc kubenswrapper[4813]: I1007 19:20:02.603852 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:20:02 crc kubenswrapper[4813]: E1007 19:20:02.603893 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:20:02 crc kubenswrapper[4813]: I1007 19:20:02.603981 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:02 crc kubenswrapper[4813]: E1007 19:20:02.604023 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:20:03 crc kubenswrapper[4813]: I1007 19:20:03.601538 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:20:03 crc kubenswrapper[4813]: E1007 19:20:03.602360 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nz8v5" podUID="c8c05824-c5ea-44b7-bd35-0c7d6561a61b" Oct 07 19:20:04 crc kubenswrapper[4813]: I1007 19:20:04.602592 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:20:04 crc kubenswrapper[4813]: I1007 19:20:04.602665 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:20:04 crc kubenswrapper[4813]: E1007 19:20:04.603570 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 07 19:20:04 crc kubenswrapper[4813]: I1007 19:20:04.603625 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:04 crc kubenswrapper[4813]: E1007 19:20:04.603737 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 07 19:20:04 crc kubenswrapper[4813]: E1007 19:20:04.603819 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 07 19:20:05 crc kubenswrapper[4813]: I1007 19:20:05.602105 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:20:05 crc kubenswrapper[4813]: I1007 19:20:05.605462 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 07 19:20:05 crc kubenswrapper[4813]: I1007 19:20:05.605800 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.421192 4813 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.471419 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dwjg2"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.472012 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:06 crc kubenswrapper[4813]: W1007 19:20:06.477403 4813 reflector.go:561] object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c": failed to list *v1.Secret: secrets "openshift-controller-manager-sa-dockercfg-msq4c" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Oct 07 19:20:06 crc kubenswrapper[4813]: E1007 19:20:06.477444 4813 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-controller-manager-sa-dockercfg-msq4c\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-controller-manager-sa-dockercfg-msq4c\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 19:20:06 crc kubenswrapper[4813]: W1007 19:20:06.477965 4813 reflector.go:561] object-"openshift-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Oct 07 19:20:06 crc kubenswrapper[4813]: E1007 19:20:06.477992 4813 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 19:20:06 crc kubenswrapper[4813]: W1007 19:20:06.478391 4813 reflector.go:561] object-"openshift-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Oct 07 19:20:06 crc kubenswrapper[4813]: E1007 19:20:06.478419 4813 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list 
resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 19:20:06 crc kubenswrapper[4813]: W1007 19:20:06.478467 4813 reflector.go:561] object-"openshift-controller-manager"/"openshift-global-ca": failed to list *v1.ConfigMap: configmaps "openshift-global-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Oct 07 19:20:06 crc kubenswrapper[4813]: E1007 19:20:06.478481 4813 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-global-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-global-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.478588 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.484767 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.485042 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.489176 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rh8d8"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.489810 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.493961 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.494771 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.496664 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-5t4w8"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.497468 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.501913 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.502610 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.502953 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zl7gl"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.503108 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.503464 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.503750 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.511503 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.512019 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.512361 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.512381 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.512771 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.513581 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.514049 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.514778 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.515355 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.516183 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.516773 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.516952 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.517084 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.518722 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.519446 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.519788 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bl9th"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.520209 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.522287 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kdlqd"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.522765 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pxbqp"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.524863 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.528497 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.531590 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-ddzzh"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.531859 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.531877 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8st2v"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.531947 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.533710 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.534280 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-9485s"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.534783 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.535151 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.535518 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.554490 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.558164 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.555088 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.555184 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.555515 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.555608 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.555903 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.556528 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.556585 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.556809 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.556875 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.557377 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.557472 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.557588 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.557624 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.557659 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.557698 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.557731 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.558211 4813 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.558376 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.559074 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.584820 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.586340 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.586915 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g4swz"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.585817 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.587473 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.586613 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.587908 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.587971 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.585290 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.588576 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.585702 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.589173 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.591061 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.598159 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.591107 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.591135 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.593866 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.593901 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.593946 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.593984 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.594011 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597359 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.598403 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.598483 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.598720 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.598899 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.599079 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.599317 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.599539 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.599990 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 07 
19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.600265 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.600415 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.607470 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.607616 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.607687 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.607753 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.607836 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.607921 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.610644 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.610835 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.610950 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.593010 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.592890 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9crzl"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.611532 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.611749 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.611942 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.612161 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.612380 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.612561 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.612720 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.612910 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613088 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613267 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613377 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613488 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613608 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613700 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613751 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613808 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613851 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613283 4813 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"client-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597558 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597607 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613963 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597644 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597651 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597691 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597710 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.614138 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597723 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597764 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597811 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.613316 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.597522 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.614498 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.615236 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.615281 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.615310 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.625806 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626440 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8d63f51c-2bd6-4eda-accb-b843de96a4c6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626474 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8d63f51c-2bd6-4eda-accb-b843de96a4c6-trusted-ca\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626511 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb7cg\" (UniqueName: \"kubernetes.io/projected/8d63f51c-2bd6-4eda-accb-b843de96a4c6-kube-api-access-sb7cg\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626545 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1871c50-e321-48a0-a611-2b92072a18dd-config\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626572 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e1871c50-e321-48a0-a611-2b92072a18dd-machine-approver-tls\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626620 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7079acca-b3b9-4b09-83d1-28dfaaaf3f5d-metrics-tls\") pod \"dns-operator-744455d44c-kdlqd\" (UID: \"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d\") " pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626648 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trzfv\" (UniqueName: \"kubernetes.io/projected/0c06d185-7b7a-448a-8b8b-dcd5a0560a20-kube-api-access-trzfv\") pod \"downloads-7954f5f757-5t4w8\" (UID: \"0c06d185-7b7a-448a-8b8b-dcd5a0560a20\") " pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626668 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d63f51c-2bd6-4eda-accb-b843de96a4c6-metrics-tls\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 
19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626694 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npf22\" (UniqueName: \"kubernetes.io/projected/7079acca-b3b9-4b09-83d1-28dfaaaf3f5d-kube-api-access-npf22\") pod \"dns-operator-744455d44c-kdlqd\" (UID: \"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d\") " pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626719 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lr9n\" (UniqueName: \"kubernetes.io/projected/e1871c50-e321-48a0-a611-2b92072a18dd-kube-api-access-2lr9n\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.626744 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e1871c50-e321-48a0-a611-2b92072a18dd-auth-proxy-config\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.627034 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.627486 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.650561 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.650693 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.650877 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.652607 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.654136 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.669020 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.669607 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fbzgp"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.670091 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rh8d8"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.670113 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x8dn8"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.670481 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-nkdg4"] Oct 07 19:20:06 crc 
kubenswrapper[4813]: I1007 19:20:06.670648 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.670843 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-glz8b"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.671186 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.671314 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.672460 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.672502 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.672542 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.672573 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.674283 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.677150 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.679280 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.680015 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.680776 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.681123 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.681435 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.682005 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.682100 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.684118 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.684226 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.686115 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dwjg2"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.687526 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.688895 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.694777 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.696293 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-5t4w8"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.698673 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-6xnmc"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.699426 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-6xnmc" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.700175 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.706955 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hhlh8"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.707710 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.708052 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.708926 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.709508 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.709556 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.710182 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zl7gl"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.714506 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.715975 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-ddzzh"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.717099 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kdlqd"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.718255 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-4tc9t"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.719310 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-4tc9t" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.719392 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-klc9x"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.720517 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.720663 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bl9th"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.721946 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.723274 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.726798 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727187 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7079acca-b3b9-4b09-83d1-28dfaaaf3f5d-metrics-tls\") pod \"dns-operator-744455d44c-kdlqd\" (UID: \"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d\") " pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727227 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trzfv\" (UniqueName: \"kubernetes.io/projected/0c06d185-7b7a-448a-8b8b-dcd5a0560a20-kube-api-access-trzfv\") pod \"downloads-7954f5f757-5t4w8\" (UID: \"0c06d185-7b7a-448a-8b8b-dcd5a0560a20\") " pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727252 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d63f51c-2bd6-4eda-accb-b843de96a4c6-metrics-tls\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727311 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npf22\" (UniqueName: \"kubernetes.io/projected/7079acca-b3b9-4b09-83d1-28dfaaaf3f5d-kube-api-access-npf22\") pod \"dns-operator-744455d44c-kdlqd\" (UID: \"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d\") " pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727363 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lr9n\" (UniqueName: \"kubernetes.io/projected/e1871c50-e321-48a0-a611-2b92072a18dd-kube-api-access-2lr9n\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727388 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e1871c50-e321-48a0-a611-2b92072a18dd-auth-proxy-config\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727513 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8d63f51c-2bd6-4eda-accb-b843de96a4c6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727551 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8d63f51c-2bd6-4eda-accb-b843de96a4c6-trusted-ca\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727574 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb7cg\" (UniqueName: \"kubernetes.io/projected/8d63f51c-2bd6-4eda-accb-b843de96a4c6-kube-api-access-sb7cg\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727611 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e1871c50-e321-48a0-a611-2b92072a18dd-machine-approver-tls\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.727635 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1871c50-e321-48a0-a611-2b92072a18dd-config\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.728602 4813 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8st2v"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.729295 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1871c50-e321-48a0-a611-2b92072a18dd-config\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.729748 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e1871c50-e321-48a0-a611-2b92072a18dd-auth-proxy-config\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.729838 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-9485s"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.732256 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fbzgp"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.732615 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.733341 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8d63f51c-2bd6-4eda-accb-b843de96a4c6-trusted-ca\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.733949 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.735579 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pxbqp"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.735782 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8d63f51c-2bd6-4eda-accb-b843de96a4c6-metrics-tls\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.736743 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e1871c50-e321-48a0-a611-2b92072a18dd-machine-approver-tls\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.740783 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7079acca-b3b9-4b09-83d1-28dfaaaf3f5d-metrics-tls\") pod \"dns-operator-744455d44c-kdlqd\" (UID: \"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d\") " pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 
19:20:06.740846 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.742718 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.742902 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.745097 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.745269 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.748392 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9crzl"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.748486 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.748590 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.751909 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-glz8b"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.751940 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.752175 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.754301 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.761687 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x8dn8"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.763426 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.766468 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.769374 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-rp89d"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.773930 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.774352 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g4swz"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.777539 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hhlh8"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.780018 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.782021 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-6xnmc"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.782357 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-klc9x"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.782492 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.783748 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-rp89d"] Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.797873 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.813839 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.834550 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.855194 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.874602 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.894605 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.914613 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.934649 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.961060 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.975982 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 07 19:20:06 crc kubenswrapper[4813]: I1007 19:20:06.994673 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.015257 4813 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.034929 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.055315 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.074685 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.093697 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.114373 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.148196 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.154437 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.175142 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.194094 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.214757 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.234619 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.254541 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.275006 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.294902 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.314612 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.334315 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.355128 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.374583 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 07 
19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.394113 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.413909 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.435069 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.454841 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.494363 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.533895 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551032 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-client-ca\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551074 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551099 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551114 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmltl\" (UniqueName: \"kubernetes.io/projected/ba658322-d68e-4312-8283-4da69865e460-kube-api-access-qmltl\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551130 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg4nj\" (UniqueName: \"kubernetes.io/projected/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-kube-api-access-rg4nj\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551147 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-trusted-ca-bundle\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551212 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6gr6\" (UniqueName: \"kubernetes.io/projected/67482f17-74c7-49ee-87e2-19f400d5bc22-kube-api-access-d6gr6\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551242 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc8rp\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-kube-api-access-vc8rp\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551282 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdv5k\" (UniqueName: \"kubernetes.io/projected/97e20cdc-f876-4512-b34e-d6aba0790163-kube-api-access-gdv5k\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551303 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0e1732d-26b3-4869-ba17-730e794456c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551367 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/92a1d2ee-54f8-4317-9d9e-c05517cb3020-trusted-ca\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551395 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-ca\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551424 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-audit-policies\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551457 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-etcd-client\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551474 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx5z4\" (UniqueName: \"kubernetes.io/projected/bc7c11d5-f79f-4e42-b742-b6b81394aba1-kube-api-access-gx5z4\") pod \"cluster-samples-operator-665b6dd947-w2c5h\" (UID: \"bc7c11d5-f79f-4e42-b742-b6b81394aba1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551488 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnmp6\" (UniqueName: \"kubernetes.io/projected/958766cf-ba8d-4342-a0c2-d8562d930f2e-kube-api-access-nnmp6\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551506 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bea39b1d-02dc-43ee-939b-1849fbd3bedd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551521 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97e20cdc-f876-4512-b34e-d6aba0790163-serving-cert\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551538 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551553 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5plvw\" (UniqueName: \"kubernetes.io/projected/b2c17235-16ec-40f2-962a-e6f58a5746a6-kube-api-access-5plvw\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551575 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fwqr\" (UniqueName: \"kubernetes.io/projected/92a1d2ee-54f8-4317-9d9e-c05517cb3020-kube-api-access-8fwqr\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551596 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92a1d2ee-54f8-4317-9d9e-c05517cb3020-serving-cert\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551623 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc7c11d5-f79f-4e42-b742-b6b81394aba1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-w2c5h\" (UID: \"bc7c11d5-f79f-4e42-b742-b6b81394aba1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551638 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ee7901fe-8ea0-4eea-8da1-689790a1dc16-node-pullsecrets\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551653 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-image-import-ca\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551668 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec7cf8ef-c153-48f7-ada6-e42400c33682-config\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551685 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551700 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-serving-cert\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551717 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-service-ca\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551745 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-bound-sa-token\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551771 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2zk7\" (UniqueName: \"kubernetes.io/projected/3643284c-f3d9-4db6-9e59-360f3bb62051-kube-api-access-x2zk7\") pod \"migrator-59844c95c7-6lddv\" (UID: \"3643284c-f3d9-4db6-9e59-360f3bb62051\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551791 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-service-ca-bundle\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551812 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-oauth-config\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551828 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-oauth-serving-cert\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551847 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-serving-cert\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551865 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2c17235-16ec-40f2-962a-e6f58a5746a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.551948 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c17235-16ec-40f2-962a-e6f58a5746a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552062 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-serving-cert\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552131 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-config\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec7cf8ef-c153-48f7-ada6-e42400c33682-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-trusted-ca\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552400 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-service-ca\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552419 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-etcd-serving-ca\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552475 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ee7901fe-8ea0-4eea-8da1-689790a1dc16-audit-dir\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552602 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-config\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552665 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958766cf-ba8d-4342-a0c2-d8562d930f2e-serving-cert\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552701 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92a1d2ee-54f8-4317-9d9e-c05517cb3020-config\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552732 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/67482f17-74c7-49ee-87e2-19f400d5bc22-audit-dir\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552765 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-etcd-client\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552825 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71e41549-5fe8-4c2f-98ab-006af97e7b51-serving-cert\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552854 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0e1732d-26b3-4869-ba17-730e794456c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552891 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/97e20cdc-f876-4512-b34e-d6aba0790163-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.552952 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xprds\" (UniqueName: \"kubernetes.io/projected/71e41549-5fe8-4c2f-98ab-006af97e7b51-kube-api-access-xprds\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553006 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0e1732d-26b3-4869-ba17-730e794456c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553051 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553092 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-audit\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553138 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-config\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553217 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-serving-cert\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553281 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553364 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bea39b1d-02dc-43ee-939b-1849fbd3bedd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553418 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-config\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553464 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6f7s\" (UniqueName: \"kubernetes.io/projected/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-kube-api-access-c6f7s\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553508 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-serving-cert\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553553 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-client\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: E1007 19:20:07.553593 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.053579163 +0000 UTC m=+134.131834774 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553618 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-encryption-config\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553648 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553676 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwmzm\" (UniqueName: \"kubernetes.io/projected/ec7cf8ef-c153-48f7-ada6-e42400c33682-kube-api-access-mwmzm\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553723 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-tls\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553753 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-certificates\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553782 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-config\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553810 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-encryption-config\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553839 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrtg9\" (UniqueName: \"kubernetes.io/projected/ee7901fe-8ea0-4eea-8da1-689790a1dc16-kube-api-access-rrtg9\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.553869 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-console-config\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.554478 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.574632 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.596248 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.615023 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.634957 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.654403 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.654613 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:07 crc kubenswrapper[4813]: E1007 19:20:07.654842 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.154799408 +0000 UTC m=+134.233055099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.654934 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92a1d2ee-54f8-4317-9d9e-c05517cb3020-serving-cert\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.654971 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec7cf8ef-c153-48f7-ada6-e42400c33682-config\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655021 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-socket-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655048 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09d10b11-4fb6-4e1e-8ca2-4bad1f86d804-cert\") pod \"ingress-canary-6xnmc\" (UID: \"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804\") " pod="openshift-ingress-canary/ingress-canary-6xnmc"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655099 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-serving-cert\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655139 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c6nk\" (UniqueName: \"kubernetes.io/projected/2e451e2e-d414-42ea-be0b-4035057c65a6-kube-api-access-5c6nk\") pod \"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655184 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvzhc\" (UniqueName: \"kubernetes.io/projected/09d10b11-4fb6-4e1e-8ca2-4bad1f86d804-kube-api-access-jvzhc\") pod \"ingress-canary-6xnmc\" (UID: \"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804\") " pod="openshift-ingress-canary/ingress-canary-6xnmc"
\"kube-api-access-jvzhc\" (UniqueName: \"kubernetes.io/projected/09d10b11-4fb6-4e1e-8ca2-4bad1f86d804-kube-api-access-jvzhc\") pod \"ingress-canary-6xnmc\" (UID: \"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804\") " pod="openshift-ingress-canary/ingress-canary-6xnmc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655204 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655250 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2zk7\" (UniqueName: \"kubernetes.io/projected/3643284c-f3d9-4db6-9e59-360f3bb62051-kube-api-access-x2zk7\") pod \"migrator-59844c95c7-6lddv\" (UID: \"3643284c-f3d9-4db6-9e59-360f3bb62051\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655279 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-service-ca-bundle\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655342 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-oauth-serving-cert\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655372 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c17235-16ec-40f2-962a-e6f58a5746a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655424 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c472b688-36d7-420d-a4e3-dbd8c4e22714-config\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655448 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea4c93eb-b5e7-46fc-9318-f78d1133145c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655493 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655520 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c472b688-36d7-420d-a4e3-dbd8c4e22714-serving-cert\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655542 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655590 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-config\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655611 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec7cf8ef-c153-48f7-ada6-e42400c33682-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655659 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/96b602b1-36a6-4e6b-a585-e1dd6378a83c-images\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655683 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/96b602b1-36a6-4e6b-a585-e1dd6378a83c-proxy-tls\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655707 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-service-ca\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655766 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-etcd-serving-ca\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655789 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ee7901fe-8ea0-4eea-8da1-689790a1dc16-audit-dir\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655813 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq2kn\" (UniqueName: \"kubernetes.io/projected/f4cef620-3f83-48c3-9894-ddef3458cfb5-kube-api-access-cq2kn\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655835 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d532f776-38c6-40c1-a647-ab1ed0c588b9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tcqtn\" (UID: \"d532f776-38c6-40c1-a647-ab1ed0c588b9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655859 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-default-certificate\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655888 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ml6f\" (UniqueName: \"kubernetes.io/projected/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-kube-api-access-7ml6f\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655913 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92a1d2ee-54f8-4317-9d9e-c05517cb3020-config\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655935 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/67482f17-74c7-49ee-87e2-19f400d5bc22-audit-dir\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655956 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnjxl\" (UniqueName: \"kubernetes.io/projected/2e4f2d5b-adfc-496b-9efe-89d540c1940e-kube-api-access-tnjxl\") pod 
\"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.655989 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce72966f-f5d7-4257-983b-d630e5b91b63-service-ca-bundle\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656020 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ca47c43c-9e61-4697-b7f5-7cec65e2c992-images\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656040 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xprds\" (UniqueName: \"kubernetes.io/projected/71e41549-5fe8-4c2f-98ab-006af97e7b51-kube-api-access-xprds\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656060 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0e1732d-26b3-4869-ba17-730e794456c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656080 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjf4h\" (UniqueName: \"kubernetes.io/projected/48819027-dd10-43a8-b2f9-18bbefcc9451-kube-api-access-gjf4h\") pod \"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656121 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/97e20cdc-f876-4512-b34e-d6aba0790163-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656142 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2e451e2e-d414-42ea-be0b-4035057c65a6-srv-cert\") pod \"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656210 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2e451e2e-d414-42ea-be0b-4035057c65a6-profile-collector-cert\") pod 
\"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656233 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656255 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656301 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-audit\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656348 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/acb83cfc-4277-4725-ace9-1469db07a8a5-signing-key\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656371 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwccn\" (UniqueName: \"kubernetes.io/projected/96b602b1-36a6-4e6b-a585-e1dd6378a83c-kube-api-access-nwccn\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656397 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bea39b1d-02dc-43ee-939b-1849fbd3bedd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656428 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-service-ca-bundle\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656447 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6f7s\" (UniqueName: \"kubernetes.io/projected/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-kube-api-access-c6f7s\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656488 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-encryption-config\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656517 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-policies\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656538 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-encryption-config\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656591 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrtg9\" (UniqueName: \"kubernetes.io/projected/ee7901fe-8ea0-4eea-8da1-689790a1dc16-kube-api-access-rrtg9\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656612 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-console-config\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656642 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e4f2d5b-adfc-496b-9efe-89d540c1940e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656733 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-certificates\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656782 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-config\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656811 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656834 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656888 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656914 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656961 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmltl\" (UniqueName: \"kubernetes.io/projected/ba658322-d68e-4312-8283-4da69865e460-kube-api-access-qmltl\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.656988 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xpj4\" (UniqueName: \"kubernetes.io/projected/971be5ec-c446-4d3b-bdd3-5ebf739996cc-kube-api-access-7xpj4\") pod \"multus-admission-controller-857f4d67dd-fbzgp\" (UID: \"971be5ec-c446-4d3b-bdd3-5ebf739996cc\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657032 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-plugins-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657057 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg4nj\" (UniqueName: \"kubernetes.io/projected/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-kube-api-access-rg4nj\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657080 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/ce999cda-8f25-4691-ac6d-2caa93e8b235-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: \"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657144 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/db804415-f392-4f6b-bd38-5be5e5bec45b-metrics-tls\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657209 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657234 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc8rp\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-kube-api-access-vc8rp\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657281 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdv5k\" (UniqueName: \"kubernetes.io/projected/97e20cdc-f876-4512-b34e-d6aba0790163-kube-api-access-gdv5k\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657353 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/971be5ec-c446-4d3b-bdd3-5ebf739996cc-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fbzgp\" (UID: \"971be5ec-c446-4d3b-bdd3-5ebf739996cc\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657378 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkv65\" (UniqueName: \"kubernetes.io/projected/acb83cfc-4277-4725-ace9-1469db07a8a5-kube-api-access-mkv65\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657432 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtlcm\" (UniqueName: \"kubernetes.io/projected/93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2-kube-api-access-jtlcm\") pod \"control-plane-machine-set-operator-78cbb6b69f-cpgrb\" (UID: \"93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657458 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-dir\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657460 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-service-ca\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657485 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-oauth-serving-cert\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657481 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d3033a8-cd36-4594-9c06-475b050e82f2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.659168 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92a1d2ee-54f8-4317-9d9e-c05517cb3020-config\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.659191 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0e1732d-26b3-4869-ba17-730e794456c5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.659221 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/67482f17-74c7-49ee-87e2-19f400d5bc22-audit-dir\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.659239 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gphs6\" (UniqueName: \"kubernetes.io/projected/ce72966f-f5d7-4257-983b-d630e5b91b63-kube-api-access-gphs6\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.659264 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-serving-cert\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 
19:20:07.659682 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ee7901fe-8ea0-4eea-8da1-689790a1dc16-audit-dir\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.660032 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.660212 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92a1d2ee-54f8-4317-9d9e-c05517cb3020-serving-cert\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.660450 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.657132 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c17235-16ec-40f2-962a-e6f58a5746a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.660816 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-config\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.660826 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec7cf8ef-c153-48f7-ada6-e42400c33682-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.660914 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/97e20cdc-f876-4512-b34e-d6aba0790163-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.660963 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/92a1d2ee-54f8-4317-9d9e-c05517cb3020-trusted-ca\") pod \"console-operator-58897d9998-bl9th\" (UID: 
\"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.660997 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-ca\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661024 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-audit-policies\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661058 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvvlc\" (UniqueName: \"kubernetes.io/projected/c472b688-36d7-420d-a4e3-dbd8c4e22714-kube-api-access-pvvlc\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661092 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-metrics-certs\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661123 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-etcd-client\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661238 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce999cda-8f25-4691-ac6d-2caa93e8b235-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: \"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661272 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bea39b1d-02dc-43ee-939b-1849fbd3bedd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661306 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97e20cdc-f876-4512-b34e-d6aba0790163-serving-cert\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661359 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4nt9\" (UniqueName: \"kubernetes.io/projected/6cdbb125-df14-4347-b188-dc29bd210459-kube-api-access-n4nt9\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661398 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-csi-data-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661413 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-console-config\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661427 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-image-import-ca\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661478 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc7c11d5-f79f-4e42-b742-b6b81394aba1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-w2c5h\" (UID: \"bc7c11d5-f79f-4e42-b742-b6b81394aba1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661508 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ee7901fe-8ea0-4eea-8da1-689790a1dc16-node-pullsecrets\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661535 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-service-ca\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.661645 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bea39b1d-02dc-43ee-939b-1849fbd3bedd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.662414 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-service-ca\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.662679 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-audit-policies\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.662768 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.662819 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srgx4\" (UniqueName: \"kubernetes.io/projected/2722f9b5-5590-4de0-8932-eb50ff14c085-kube-api-access-srgx4\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.662887 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.662935 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.662985 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-bound-sa-token\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.663047 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-oauth-config\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.663095 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cpgrb\" (UID: \"93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.663195 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2c17235-16ec-40f2-962a-e6f58a5746a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.663247 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z95pc\" (UniqueName: \"kubernetes.io/projected/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-kube-api-access-z95pc\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.663291 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea4c93eb-b5e7-46fc-9318-f78d1133145c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.664057 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-encryption-config\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.664703 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bea39b1d-02dc-43ee-939b-1849fbd3bedd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.665015 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-encryption-config\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.665797 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec7cf8ef-c153-48f7-ada6-e42400c33682-config\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.665854 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/ee7901fe-8ea0-4eea-8da1-689790a1dc16-node-pullsecrets\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.665901 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-certificates\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.666648 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/92a1d2ee-54f8-4317-9d9e-c05517cb3020-trusted-ca\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667102 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-ca\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667115 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-image-import-ca\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667156 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-config\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667254 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-serving-cert\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667303 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d3033a8-cd36-4594-9c06-475b050e82f2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667353 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kp2l\" (UniqueName: \"kubernetes.io/projected/db804415-f392-4f6b-bd38-5be5e5bec45b-kube-api-access-9kp2l\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667400 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-serving-cert\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667431 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce999cda-8f25-4691-ac6d-2caa93e8b235-config\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: \"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667489 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-trusted-ca\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667679 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-config\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667723 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2722f9b5-5590-4de0-8932-eb50ff14c085-srv-cert\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667744 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958766cf-ba8d-4342-a0c2-d8562d930f2e-serving-cert\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667775 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-certs\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667791 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-node-bootstrap-token\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667807 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/db804415-f392-4f6b-bd38-5be5e5bec45b-config-volume\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d"
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667858 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca47c43c-9e61-4697-b7f5-7cec65e2c992-config\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667876 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667895 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667916 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-etcd-client\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667933 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-apiservice-cert\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.667984 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71e41549-5fe8-4c2f-98ab-006af97e7b51-serving-cert\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.668000 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0e1732d-26b3-4869-ba17-730e794456c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.668018 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.668036 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d3033a8-cd36-4594-9c06-475b050e82f2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.668051 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-stats-auth\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.668646 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-trusted-ca\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.669582 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/bc7c11d5-f79f-4e42-b742-b6b81394aba1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-w2c5h\" (UID: \"bc7c11d5-f79f-4e42-b742-b6b81394aba1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.669671 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-config\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.669702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-serving-cert\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.669728 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2e4f2d5b-adfc-496b-9efe-89d540c1940e-proxy-tls\") pod \"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.669764 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") 
pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.669987 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-webhook-cert\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670031 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-serving-cert\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670054 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-config\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670102 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-serving-cert\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670126 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-client\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670151 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670203 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwmzm\" (UniqueName: \"kubernetes.io/projected/ec7cf8ef-c153-48f7-ada6-e42400c33682-kube-api-access-mwmzm\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670225 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-tls\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670268 4813 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-client-ca\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670294 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlf5m\" (UniqueName: \"kubernetes.io/projected/d532f776-38c6-40c1-a647-ab1ed0c588b9-kube-api-access-zlf5m\") pod \"package-server-manager-789f6589d5-tcqtn\" (UID: \"d532f776-38c6-40c1-a647-ab1ed0c588b9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.670378 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-etcd-client\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: E1007 19:20:07.670646 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.170631195 +0000 UTC m=+134.248886816 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.671577 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71e41549-5fe8-4c2f-98ab-006af97e7b51-config\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.672104 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2c17235-16ec-40f2-962a-e6f58a5746a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.672593 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71e41549-5fe8-4c2f-98ab-006af97e7b51-serving-cert\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.673021 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-config\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: 
\"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.673993 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.674126 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-serving-cert\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.674195 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ca47c43c-9e61-4697-b7f5-7cec65e2c992-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.674238 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-trusted-ca-bundle\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.675252 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97e20cdc-f876-4512-b34e-d6aba0790163-serving-cert\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.675907 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6gr6\" (UniqueName: \"kubernetes.io/projected/67482f17-74c7-49ee-87e2-19f400d5bc22-kube-api-access-d6gr6\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.676017 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.676089 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/96b602b1-36a6-4e6b-a585-e1dd6378a83c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: 
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677008 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677399 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0e1732d-26b3-4869-ba17-730e794456c5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677013 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2722f9b5-5590-4de0-8932-eb50ff14c085-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677707 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rgsc\" (UniqueName: \"kubernetes.io/projected/3d3033a8-cd36-4594-9c06-475b050e82f2-kube-api-access-6rgsc\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677763 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgpjn\" (UniqueName: \"kubernetes.io/projected/ca47c43c-9e61-4697-b7f5-7cec65e2c992-kube-api-access-lgpjn\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677822 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0e1732d-26b3-4869-ba17-730e794456c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677870 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99a37f2e-fef1-47f1-ac60-6504a968ebf8-config-volume\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677918 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-mountpoint-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.677990 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678041 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx5z4\" (UniqueName: \"kubernetes.io/projected/bc7c11d5-f79f-4e42-b742-b6b81394aba1-kube-api-access-gx5z4\") pod \"cluster-samples-operator-665b6dd947-w2c5h\" (UID: \"bc7c11d5-f79f-4e42-b742-b6b81394aba1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678072 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958766cf-ba8d-4342-a0c2-d8562d930f2e-serving-cert\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678099 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-config\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678089 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnmp6\" (UniqueName: \"kubernetes.io/projected/958766cf-ba8d-4342-a0c2-d8562d930f2e-kube-api-access-nnmp6\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678155 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99a37f2e-fef1-47f1-ac60-6504a968ebf8-secret-volume\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678195 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-tls\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678200 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/acb83cfc-4277-4725-ace9-1469db07a8a5-signing-cabundle\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678249 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blrqj\" (UniqueName: \"kubernetes.io/projected/ea4c93eb-b5e7-46fc-9318-f78d1133145c-kube-api-access-blrqj\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678462 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee7901fe-8ea0-4eea-8da1-689790a1dc16-serving-cert\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678489 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-tmpfs\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678506 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-config\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678525 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678666 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-client-ca\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678685 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5plvw\" (UniqueName: \"kubernetes.io/projected/b2c17235-16ec-40f2-962a-e6f58a5746a6-kube-api-access-5plvw\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678819 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-registration-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678941 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fwqr\" (UniqueName: \"kubernetes.io/projected/92a1d2ee-54f8-4317-9d9e-c05517cb3020-kube-api-access-8fwqr\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.678985 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj9k9\" (UniqueName: \"kubernetes.io/projected/99a37f2e-fef1-47f1-ac60-6504a968ebf8-kube-api-access-cj9k9\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.679005 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-audit\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.679303 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-trusted-ca-bundle\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.679544 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ee7901fe-8ea0-4eea-8da1-689790a1dc16-etcd-serving-ca\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.679928 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/67482f17-74c7-49ee-87e2-19f400d5bc22-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.681774 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-oauth-config\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.682110 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-etcd-client\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.682400 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-etcd-client\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.684483 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67482f17-74c7-49ee-87e2-19f400d5bc22-serving-cert\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.693114 4813 request.go:700] Waited for 1.01626163s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.695079 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.714763 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.734722 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.754988 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.775500 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780058 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780243 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/acb83cfc-4277-4725-ace9-1469db07a8a5-signing-cabundle\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8"
Oct 07 19:20:07 crc kubenswrapper[4813]: E1007 19:20:07.780264 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.280238147 +0000 UTC m=+134.358493768 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780308 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blrqj\" (UniqueName: \"kubernetes.io/projected/ea4c93eb-b5e7-46fc-9318-f78d1133145c-kube-api-access-blrqj\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780369 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-tmpfs\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780403 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-registration-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780423 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-config\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780455 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj9k9\" (UniqueName: \"kubernetes.io/projected/99a37f2e-fef1-47f1-ac60-6504a968ebf8-kube-api-access-cj9k9\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780476 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-socket-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780509 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09d10b11-4fb6-4e1e-8ca2-4bad1f86d804-cert\") pod \"ingress-canary-6xnmc\" (UID: \"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804\") " pod="openshift-ingress-canary/ingress-canary-6xnmc"
Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780540 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c6nk\" (UniqueName:
\"kubernetes.io/projected/2e451e2e-d414-42ea-be0b-4035057c65a6-kube-api-access-5c6nk\") pod \"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780560 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvzhc\" (UniqueName: \"kubernetes.io/projected/09d10b11-4fb6-4e1e-8ca2-4bad1f86d804-kube-api-access-jvzhc\") pod \"ingress-canary-6xnmc\" (UID: \"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804\") " pod="openshift-ingress-canary/ingress-canary-6xnmc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780583 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780608 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c472b688-36d7-420d-a4e3-dbd8c4e22714-config\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780659 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea4c93eb-b5e7-46fc-9318-f78d1133145c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780673 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-registration-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780681 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780711 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c472b688-36d7-420d-a4e3-dbd8c4e22714-serving-cert\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780728 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780748 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/96b602b1-36a6-4e6b-a585-e1dd6378a83c-images\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780763 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/96b602b1-36a6-4e6b-a585-e1dd6378a83c-proxy-tls\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780779 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq2kn\" (UniqueName: \"kubernetes.io/projected/f4cef620-3f83-48c3-9894-ddef3458cfb5-kube-api-access-cq2kn\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780794 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d532f776-38c6-40c1-a647-ab1ed0c588b9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tcqtn\" (UID: \"d532f776-38c6-40c1-a647-ab1ed0c588b9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780809 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-default-certificate\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780830 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ml6f\" (UniqueName: \"kubernetes.io/projected/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-kube-api-access-7ml6f\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780872 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnjxl\" (UniqueName: \"kubernetes.io/projected/2e4f2d5b-adfc-496b-9efe-89d540c1940e-kube-api-access-tnjxl\") pod \"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780891 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce72966f-f5d7-4257-983b-d630e5b91b63-service-ca-bundle\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " 
pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780914 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ca47c43c-9e61-4697-b7f5-7cec65e2c992-images\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780937 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjf4h\" (UniqueName: \"kubernetes.io/projected/48819027-dd10-43a8-b2f9-18bbefcc9451-kube-api-access-gjf4h\") pod \"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780955 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/acb83cfc-4277-4725-ace9-1469db07a8a5-signing-key\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780972 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2e451e2e-d414-42ea-be0b-4035057c65a6-srv-cert\") pod \"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.780988 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2e451e2e-d414-42ea-be0b-4035057c65a6-profile-collector-cert\") pod \"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781002 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781018 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwccn\" (UniqueName: \"kubernetes.io/projected/96b602b1-36a6-4e6b-a585-e1dd6378a83c-kube-api-access-nwccn\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781041 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-policies\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781057 4813 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e4f2d5b-adfc-496b-9efe-89d540c1940e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781083 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781099 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781120 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xpj4\" (UniqueName: \"kubernetes.io/projected/971be5ec-c446-4d3b-bdd3-5ebf739996cc-kube-api-access-7xpj4\") pod \"multus-admission-controller-857f4d67dd-fbzgp\" (UID: \"971be5ec-c446-4d3b-bdd3-5ebf739996cc\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781136 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-plugins-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781174 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ce999cda-8f25-4691-ac6d-2caa93e8b235-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: \"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781189 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781203 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/db804415-f392-4f6b-bd38-5be5e5bec45b-metrics-tls\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781241 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtlcm\" (UniqueName: 
\"kubernetes.io/projected/93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2-kube-api-access-jtlcm\") pod \"control-plane-machine-set-operator-78cbb6b69f-cpgrb\" (UID: \"93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781258 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/971be5ec-c446-4d3b-bdd3-5ebf739996cc-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fbzgp\" (UID: \"971be5ec-c446-4d3b-bdd3-5ebf739996cc\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781272 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkv65\" (UniqueName: \"kubernetes.io/projected/acb83cfc-4277-4725-ace9-1469db07a8a5-kube-api-access-mkv65\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781290 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-dir\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781305 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d3033a8-cd36-4594-9c06-475b050e82f2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781341 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gphs6\" (UniqueName: \"kubernetes.io/projected/ce72966f-f5d7-4257-983b-d630e5b91b63-kube-api-access-gphs6\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781362 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvvlc\" (UniqueName: \"kubernetes.io/projected/c472b688-36d7-420d-a4e3-dbd8c4e22714-kube-api-access-pvvlc\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781378 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-metrics-certs\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781394 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce999cda-8f25-4691-ac6d-2caa93e8b235-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: 
\"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781409 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4nt9\" (UniqueName: \"kubernetes.io/projected/6cdbb125-df14-4347-b188-dc29bd210459-kube-api-access-n4nt9\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781428 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-csi-data-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781448 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781466 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srgx4\" (UniqueName: \"kubernetes.io/projected/2722f9b5-5590-4de0-8932-eb50ff14c085-kube-api-access-srgx4\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781487 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cpgrb\" (UID: \"93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781504 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781524 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z95pc\" (UniqueName: \"kubernetes.io/projected/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-kube-api-access-z95pc\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781539 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea4c93eb-b5e7-46fc-9318-f78d1133145c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781554 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d3033a8-cd36-4594-9c06-475b050e82f2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781569 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kp2l\" (UniqueName: \"kubernetes.io/projected/db804415-f392-4f6b-bd38-5be5e5bec45b-kube-api-access-9kp2l\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781584 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce999cda-8f25-4691-ac6d-2caa93e8b235-config\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: \"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781611 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2722f9b5-5590-4de0-8932-eb50ff14c085-srv-cert\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781627 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-certs\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781643 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-node-bootstrap-token\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781657 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/db804415-f392-4f6b-bd38-5be5e5bec45b-config-volume\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781672 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781702 4813 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781718 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca47c43c-9e61-4697-b7f5-7cec65e2c992-config\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781711 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-socket-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781736 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781763 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-apiservice-cert\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781778 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781796 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d3033a8-cd36-4594-9c06-475b050e82f2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781814 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2e4f2d5b-adfc-496b-9efe-89d540c1940e-proxy-tls\") pod \"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781829 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-stats-auth\") pod 
\"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781847 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781862 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-webhook-cert\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781884 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlf5m\" (UniqueName: \"kubernetes.io/projected/d532f776-38c6-40c1-a647-ab1ed0c588b9-kube-api-access-zlf5m\") pod \"package-server-manager-789f6589d5-tcqtn\" (UID: \"d532f776-38c6-40c1-a647-ab1ed0c588b9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781904 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ca47c43c-9e61-4697-b7f5-7cec65e2c992-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781927 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781943 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2722f9b5-5590-4de0-8932-eb50ff14c085-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781959 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rgsc\" (UniqueName: \"kubernetes.io/projected/3d3033a8-cd36-4594-9c06-475b050e82f2-kube-api-access-6rgsc\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781973 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/96b602b1-36a6-4e6b-a585-e1dd6378a83c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: 
\"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781995 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgpjn\" (UniqueName: \"kubernetes.io/projected/ca47c43c-9e61-4697-b7f5-7cec65e2c992-kube-api-access-lgpjn\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.782010 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99a37f2e-fef1-47f1-ac60-6504a968ebf8-config-volume\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.782025 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-mountpoint-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.782058 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99a37f2e-fef1-47f1-ac60-6504a968ebf8-secret-volume\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.782072 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.782643 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.782726 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-csi-data-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.782937 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-config\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.784006 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/96b602b1-36a6-4e6b-a585-e1dd6378a83c-images\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.784088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-policies\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.782942 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c472b688-36d7-420d-a4e3-dbd8c4e22714-config\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.784821 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c472b688-36d7-420d-a4e3-dbd8c4e22714-serving-cert\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.785449 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2e4f2d5b-adfc-496b-9efe-89d540c1940e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.785739 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ca47c43c-9e61-4697-b7f5-7cec65e2c992-images\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.793942 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.794416 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-plugins-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.794703 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-tmpfs\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.795525 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-dir\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.796590 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.796898 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/96b602b1-36a6-4e6b-a585-e1dd6378a83c-proxy-tls\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: E1007 19:20:07.797035 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.297019291 +0000 UTC m=+134.375275032 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.797257 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca47c43c-9e61-4697-b7f5-7cec65e2c992-config\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.781715 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.798060 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.798096 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6cdbb125-df14-4347-b188-dc29bd210459-mountpoint-dir\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.798399 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.798559 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d3033a8-cd36-4594-9c06-475b050e82f2-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.798851 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d3033a8-cd36-4594-9c06-475b050e82f2-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.798934 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.799018 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/96b602b1-36a6-4e6b-a585-e1dd6378a83c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.799994 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-apiservice-cert\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.803190 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.803788 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-webhook-cert\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.803953 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"srv-cert\" (UniqueName: \"kubernetes.io/secret/2e451e2e-d414-42ea-be0b-4035057c65a6-srv-cert\") pod \"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.805850 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.806087 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ca47c43c-9e61-4697-b7f5-7cec65e2c992-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.806845 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.807282 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.807360 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2e451e2e-d414-42ea-be0b-4035057c65a6-profile-collector-cert\") pod \"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.808171 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-default-certificate\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.808459 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/971be5ec-c446-4d3b-bdd3-5ebf739996cc-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fbzgp\" (UID: \"971be5ec-c446-4d3b-bdd3-5ebf739996cc\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.808773 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-g4swz\" 
(UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.809504 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99a37f2e-fef1-47f1-ac60-6504a968ebf8-secret-volume\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.809940 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.810783 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/2722f9b5-5590-4de0-8932-eb50ff14c085-profile-collector-cert\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.811713 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-stats-auth\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.812139 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2e4f2d5b-adfc-496b-9efe-89d540c1940e-proxy-tls\") pod \"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.815212 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.835016 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.842394 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ce72966f-f5d7-4257-983b-d630e5b91b63-metrics-certs\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.855198 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.868158 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/acb83cfc-4277-4725-ace9-1469db07a8a5-signing-key\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.874819 4813 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.882816 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:07 crc kubenswrapper[4813]: E1007 19:20:07.883489 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.383463569 +0000 UTC m=+134.461719180 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.894169 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.901448 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/acb83cfc-4277-4725-ace9-1469db07a8a5-signing-cabundle\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.914027 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.924920 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ce72966f-f5d7-4257-983b-d630e5b91b63-service-ca-bundle\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.934923 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.954152 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.976813 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.987687 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/2722f9b5-5590-4de0-8932-eb50ff14c085-srv-cert\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.994275 4813 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.994301 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:07 crc kubenswrapper[4813]: E1007 19:20:07.994586 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.494575004 +0000 UTC m=+134.572830615 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:07 crc kubenswrapper[4813]: I1007 19:20:07.999032 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99a37f2e-fef1-47f1-ac60-6504a968ebf8-config-volume\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.014615 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.034396 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.054704 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.058906 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce999cda-8f25-4691-ac6d-2caa93e8b235-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: \"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.074643 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.096148 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.096413 4813 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.596312134 +0000 UTC m=+134.674567785 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.097266 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.097382 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.097613 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.59759947 +0000 UTC m=+134.675855191 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.105569 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce999cda-8f25-4691-ac6d-2caa93e8b235-config\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: \"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.114453 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.127523 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/d532f776-38c6-40c1-a647-ab1ed0c588b9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tcqtn\" (UID: \"d532f776-38c6-40c1-a647-ab1ed0c588b9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.133978 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 07 19:20:08 crc 
kubenswrapper[4813]: I1007 19:20:08.154981 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.175814 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.194653 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.198645 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.199039 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.69899461 +0000 UTC m=+134.777250271 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.200004 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.200581 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.700557505 +0000 UTC m=+134.778813156 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.205680 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/09d10b11-4fb6-4e1e-8ca2-4bad1f86d804-cert\") pod \"ingress-canary-6xnmc\" (UID: \"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804\") " pod="openshift-ingress-canary/ingress-canary-6xnmc" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.215527 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.236080 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.255603 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.269639 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.285519 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.294392 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.294663 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.301373 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.301438 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.8014249 +0000 UTC m=+134.879680511 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.301723 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.302254 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.802237373 +0000 UTC m=+134.880493004 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.314690 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.334510 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.338666 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cpgrb\" (UID: \"93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.354926 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.375070 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.378975 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea4c93eb-b5e7-46fc-9318-f78d1133145c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 
19:20:08.394539 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.403646 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.403859 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.903825839 +0000 UTC m=+134.982081490 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.404493 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.406234 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:08.906204656 +0000 UTC m=+134.984460337 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.414487 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.423169 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea4c93eb-b5e7-46fc-9318-f78d1133145c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.435555 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.455295 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.468791 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-certs\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.474013 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.493266 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.497219 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-node-bootstrap-token\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.506748 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.506864 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.006838395 +0000 UTC m=+135.085094036 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.506976 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.507411 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.007396081 +0000 UTC m=+135.085651732 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.513764 4813 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.534229 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.554800 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.602267 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npf22\" (UniqueName: \"kubernetes.io/projected/7079acca-b3b9-4b09-83d1-28dfaaaf3f5d-kube-api-access-npf22\") pod \"dns-operator-744455d44c-kdlqd\" (UID: \"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d\") " pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.608570 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lr9n\" (UniqueName: \"kubernetes.io/projected/e1871c50-e321-48a0-a611-2b92072a18dd-kube-api-access-2lr9n\") pod \"machine-approver-56656f9798-7tm9r\" (UID: \"e1871c50-e321-48a0-a611-2b92072a18dd\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.608808 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: 
E1007 19:20:08.608911 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.108896905 +0000 UTC m=+135.187152516 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.610222 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.611012 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.110960553 +0000 UTC m=+135.189216194 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.635037 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.646625 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8d63f51c-2bd6-4eda-accb-b843de96a4c6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.651825 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trzfv\" (UniqueName: \"kubernetes.io/projected/0c06d185-7b7a-448a-8b8b-dcd5a0560a20-kube-api-access-trzfv\") pod \"downloads-7954f5f757-5t4w8\" (UID: \"0c06d185-7b7a-448a-8b8b-dcd5a0560a20\") " pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:20:08 crc kubenswrapper[4813]: W1007 19:20:08.652727 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1871c50_e321_48a0_a611_2b92072a18dd.slice/crio-aeec5ec8408b5646c92229802c77b6c0107af00200f08fa7209a839cfae89f50 WatchSource:0}: Error finding container aeec5ec8408b5646c92229802c77b6c0107af00200f08fa7209a839cfae89f50: Status 404 returned error can't find the container with id aeec5ec8408b5646c92229802c77b6c0107af00200f08fa7209a839cfae89f50 Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.660651 4813 configmap.go:193] Couldn't get configMap openshift-controller-manager/openshift-global-ca: failed to sync configmap cache: timed out waiting for the condition Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.660920 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles podName:8ac797b3-b22d-4c0a-9d08-733d851ad9f2 nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.160884661 +0000 UTC m=+135.239140362 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-ca-bundles" (UniqueName: "kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles") pod "controller-manager-879f6c89f-dwjg2" (UID: "8ac797b3-b22d-4c0a-9d08-733d851ad9f2") : failed to sync configmap cache: timed out waiting for the condition Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.666249 4813 configmap.go:193] Couldn't get configMap openshift-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.666596 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca podName:8ac797b3-b22d-4c0a-9d08-733d851ad9f2 nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.166561661 +0000 UTC m=+135.244817302 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca") pod "controller-manager-879f6c89f-dwjg2" (UID: "8ac797b3-b22d-4c0a-9d08-733d851ad9f2") : failed to sync configmap cache: timed out waiting for the condition Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.671066 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb7cg\" (UniqueName: \"kubernetes.io/projected/8d63f51c-2bd6-4eda-accb-b843de96a4c6-kube-api-access-sb7cg\") pod \"ingress-operator-5b745b69d9-9jwj5\" (UID: \"8d63f51c-2bd6-4eda-accb-b843de96a4c6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.676913 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.688859 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/db804415-f392-4f6b-bd38-5be5e5bec45b-config-volume\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.694549 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.701523 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/db804415-f392-4f6b-bd38-5be5e5bec45b-metrics-tls\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.712042 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.712240 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.212213899 +0000 UTC m=+135.290469540 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.712702 4813 request.go:700] Waited for 1.938469327s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-dockercfg-jwfmh&limit=500&resourceVersion=0 Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.713099 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.714015 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.21399329 +0000 UTC m=+135.292248941 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.715305 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.734914 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.774638 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.807845 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2zk7\" (UniqueName: \"kubernetes.io/projected/3643284c-f3d9-4db6-9e59-360f3bb62051-kube-api-access-x2zk7\") pod \"migrator-59844c95c7-6lddv\" (UID: \"3643284c-f3d9-4db6-9e59-360f3bb62051\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.814581 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.814871 4813 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.314849865 +0000 UTC m=+135.393105476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.815184 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.815758 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.31573317 +0000 UTC m=+135.393988811 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.829374 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6f7s\" (UniqueName: \"kubernetes.io/projected/23b2cd20-e7fe-476d-a2eb-05bbe7aa102d-kube-api-access-c6f7s\") pod \"etcd-operator-b45778765-zl7gl\" (UID: \"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.843604 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.849813 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xprds\" (UniqueName: \"kubernetes.io/projected/71e41549-5fe8-4c2f-98ab-006af97e7b51-kube-api-access-xprds\") pod \"authentication-operator-69f744f599-9485s\" (UID: \"71e41549-5fe8-4c2f-98ab-006af97e7b51\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.867694 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmltl\" (UniqueName: \"kubernetes.io/projected/ba658322-d68e-4312-8283-4da69865e460-kube-api-access-qmltl\") pod \"console-f9d7485db-ddzzh\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.869483 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.886750 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.888818 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg4nj\" (UniqueName: \"kubernetes.io/projected/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-kube-api-access-rg4nj\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.892891 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.908127 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.909464 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc8rp\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-kube-api-access-vc8rp\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.916928 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:08 crc kubenswrapper[4813]: E1007 19:20:08.917480 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.41746589 +0000 UTC m=+135.495721501 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.934471 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrtg9\" (UniqueName: \"kubernetes.io/projected/ee7901fe-8ea0-4eea-8da1-689790a1dc16-kube-api-access-rrtg9\") pod \"apiserver-76f77b778f-rh8d8\" (UID: \"ee7901fe-8ea0-4eea-8da1-689790a1dc16\") " pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.943732 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.951102 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdv5k\" (UniqueName: \"kubernetes.io/projected/97e20cdc-f876-4512-b34e-d6aba0790163-kube-api-access-gdv5k\") pod \"openshift-config-operator-7777fb866f-8st2v\" (UID: \"97e20cdc-f876-4512-b34e-d6aba0790163\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:08 crc kubenswrapper[4813]: I1007 19:20:08.989543 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-bound-sa-token\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.001973 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwmzm\" (UniqueName: \"kubernetes.io/projected/ec7cf8ef-c153-48f7-ada6-e42400c33682-kube-api-access-mwmzm\") pod \"openshift-apiserver-operator-796bbdcf4f-s2hqz\" (UID: \"ec7cf8ef-c153-48f7-ada6-e42400c33682\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.011747 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6gr6\" (UniqueName: \"kubernetes.io/projected/67482f17-74c7-49ee-87e2-19f400d5bc22-kube-api-access-d6gr6\") pod \"apiserver-7bbb656c7d-2gvws\" (UID: \"67482f17-74c7-49ee-87e2-19f400d5bc22\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.019195 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.019856 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.519841598 +0000 UTC m=+135.598097209 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.050384 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e0e1732d-26b3-4869-ba17-730e794456c5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-698wl\" (UID: \"e0e1732d-26b3-4869-ba17-730e794456c5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.050738 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx5z4\" (UniqueName: \"kubernetes.io/projected/bc7c11d5-f79f-4e42-b742-b6b81394aba1-kube-api-access-gx5z4\") pod \"cluster-samples-operator-665b6dd947-w2c5h\" (UID: \"bc7c11d5-f79f-4e42-b742-b6b81394aba1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.069187 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnmp6\" (UniqueName: \"kubernetes.io/projected/958766cf-ba8d-4342-a0c2-d8562d930f2e-kube-api-access-nnmp6\") pod \"route-controller-manager-6576b87f9c-lq4gb\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.077940 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.086540 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.089637 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5plvw\" (UniqueName: \"kubernetes.io/projected/b2c17235-16ec-40f2-962a-e6f58a5746a6-kube-api-access-5plvw\") pod \"openshift-controller-manager-operator-756b6f6bc6-f2ldv\" (UID: \"b2c17235-16ec-40f2-962a-e6f58a5746a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.095041 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.105011 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.110662 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fwqr\" (UniqueName: \"kubernetes.io/projected/92a1d2ee-54f8-4317-9d9e-c05517cb3020-kube-api-access-8fwqr\") pod \"console-operator-58897d9998-bl9th\" (UID: \"92a1d2ee-54f8-4317-9d9e-c05517cb3020\") " pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.114623 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.120131 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.121160 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.621138355 +0000 UTC m=+135.699393966 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.127916 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.136722 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blrqj\" (UniqueName: \"kubernetes.io/projected/ea4c93eb-b5e7-46fc-9318-f78d1133145c-kube-api-access-blrqj\") pod \"kube-storage-version-migrator-operator-b67b599dd-b6gb7\" (UID: \"ea4c93eb-b5e7-46fc-9318-f78d1133145c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.154511 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c6nk\" (UniqueName: \"kubernetes.io/projected/2e451e2e-d414-42ea-be0b-4035057c65a6-kube-api-access-5c6nk\") pod \"catalog-operator-68c6474976-sbmzw\" (UID: \"2e451e2e-d414-42ea-be0b-4035057c65a6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.162707 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.176606 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.178467 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj9k9\" (UniqueName: \"kubernetes.io/projected/99a37f2e-fef1-47f1-ac60-6504a968ebf8-kube-api-access-cj9k9\") pod \"collect-profiles-29331075-s945m\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.195366 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvzhc\" (UniqueName: \"kubernetes.io/projected/09d10b11-4fb6-4e1e-8ca2-4bad1f86d804-kube-api-access-jvzhc\") pod \"ingress-canary-6xnmc\" (UID: \"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804\") " pod="openshift-ingress-canary/ingress-canary-6xnmc" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.201194 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.207776 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4nt9\" (UniqueName: \"kubernetes.io/projected/6cdbb125-df14-4347-b188-dc29bd210459-kube-api-access-n4nt9\") pod \"csi-hostpathplugin-klc9x\" (UID: \"6cdbb125-df14-4347-b188-dc29bd210459\") " pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.213079 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.214288 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.222959 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.223311 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.223438 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.223901 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.723888104 +0000 UTC m=+135.802143715 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.230053 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srgx4\" (UniqueName: \"kubernetes.io/projected/2722f9b5-5590-4de0-8932-eb50ff14c085-kube-api-access-srgx4\") pod \"olm-operator-6b444d44fb-bfr95\" (UID: \"2722f9b5-5590-4de0-8932-eb50ff14c085\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.245414 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.258881 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z95pc\" (UniqueName: \"kubernetes.io/projected/202dcfb0-10ca-4f73-b5e1-97a33441c1ac-kube-api-access-z95pc\") pod \"machine-config-server-4tc9t\" (UID: \"202dcfb0-10ca-4f73-b5e1-97a33441c1ac\") " pod="openshift-machine-config-operator/machine-config-server-4tc9t" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.267947 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjf4h\" (UniqueName: \"kubernetes.io/projected/48819027-dd10-43a8-b2f9-18bbefcc9451-kube-api-access-gjf4h\") pod \"marketplace-operator-79b997595-hhlh8\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.288155 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq2kn\" (UniqueName: \"kubernetes.io/projected/f4cef620-3f83-48c3-9894-ddef3458cfb5-kube-api-access-cq2kn\") pod \"oauth-openshift-558db77b4-g4swz\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.311602 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kp2l\" (UniqueName: \"kubernetes.io/projected/db804415-f392-4f6b-bd38-5be5e5bec45b-kube-api-access-9kp2l\") pod \"dns-default-rp89d\" (UID: \"db804415-f392-4f6b-bd38-5be5e5bec45b\") " pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.323992 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.327272 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.82725378 +0000 UTC m=+135.905509391 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.331917 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkv65\" (UniqueName: \"kubernetes.io/projected/acb83cfc-4277-4725-ace9-1469db07a8a5-kube-api-access-mkv65\") pod \"service-ca-9c57cc56f-x8dn8\" (UID: \"acb83cfc-4277-4725-ace9-1469db07a8a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.357764 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.369063 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.378262 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" event={"ID":"8d63f51c-2bd6-4eda-accb-b843de96a4c6","Type":"ContainerStarted","Data":"51116fb366a7bf342929d94f60334a70d14c0e01064171f9606d437f823e1d94"} Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.378534 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.380840 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xpj4\" (UniqueName: \"kubernetes.io/projected/971be5ec-c446-4d3b-bdd3-5ebf739996cc-kube-api-access-7xpj4\") pod \"multus-admission-controller-857f4d67dd-fbzgp\" (UID: \"971be5ec-c446-4d3b-bdd3-5ebf739996cc\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.383491 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" event={"ID":"e1871c50-e321-48a0-a611-2b92072a18dd","Type":"ContainerStarted","Data":"c7e3eacfda976be2ea56f48c776b70beb9730cc5b69bd68c8527e880dbc6e486"} Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.383543 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" event={"ID":"e1871c50-e321-48a0-a611-2b92072a18dd","Type":"ContainerStarted","Data":"aeec5ec8408b5646c92229802c77b6c0107af00200f08fa7209a839cfae89f50"} Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.403570 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-6xnmc" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.404919 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kdlqd"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.406957 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ce999cda-8f25-4691-ac6d-2caa93e8b235-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4vmm4\" (UID: \"ce999cda-8f25-4691-ac6d-2caa93e8b235\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.408784 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-ddzzh"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.408819 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.412670 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d3033a8-cd36-4594-9c06-475b050e82f2-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.413744 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.418354 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtlcm\" (UniqueName: \"kubernetes.io/projected/93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2-kube-api-access-jtlcm\") pod \"control-plane-machine-set-operator-78cbb6b69f-cpgrb\" (UID: \"93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.419678 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.428014 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.428300 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:09.92828593 +0000 UTC m=+136.006541541 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.429778 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-4tc9t" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.433053 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ml6f\" (UniqueName: \"kubernetes.io/projected/e0c35bd9-fbb2-448c-b1c0-f034529f75c8-kube-api-access-7ml6f\") pod \"packageserver-d55dfcdfc-d7w7r\" (UID: \"e0c35bd9-fbb2-448c-b1c0-f034529f75c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.451286 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-klc9x" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.451494 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gphs6\" (UniqueName: \"kubernetes.io/projected/ce72966f-f5d7-4257-983b-d630e5b91b63-kube-api-access-gphs6\") pod \"router-default-5444994796-nkdg4\" (UID: \"ce72966f-f5d7-4257-983b-d630e5b91b63\") " pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.453341 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.482164 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.484387 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-5t4w8"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.489285 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnjxl\" (UniqueName: \"kubernetes.io/projected/2e4f2d5b-adfc-496b-9efe-89d540c1940e-kube-api-access-tnjxl\") pod \"machine-config-controller-84d6567774-mxcgt\" (UID: \"2e4f2d5b-adfc-496b-9efe-89d540c1940e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.500374 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-9485s"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.513798 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwccn\" (UniqueName: \"kubernetes.io/projected/96b602b1-36a6-4e6b-a585-e1dd6378a83c-kube-api-access-nwccn\") pod \"machine-config-operator-74547568cd-b6pcc\" (UID: \"96b602b1-36a6-4e6b-a585-e1dd6378a83c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.515842 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvvlc\" (UniqueName: \"kubernetes.io/projected/c472b688-36d7-420d-a4e3-dbd8c4e22714-kube-api-access-pvvlc\") pod \"service-ca-operator-777779d784-glz8b\" (UID: \"c472b688-36d7-420d-a4e3-dbd8c4e22714\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.529262 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.530502 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.531047 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.031033029 +0000 UTC m=+136.109288640 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.531872 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/127b60e9-ca16-4e5c-bd69-6bd6f96625ed-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kdb6c\" (UID: \"127b60e9-ca16-4e5c-bd69-6bd6f96625ed\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.538959 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.553773 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" Oct 07 19:20:09 crc kubenswrapper[4813]: W1007 19:20:09.557307 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c06d185_7b7a_448a_8b8b_dcd5a0560a20.slice/crio-4439f2d1ae933f57c229e3c48cc46dd0d0c133d6d9e343827c9562fe53f4514b WatchSource:0}: Error finding container 4439f2d1ae933f57c229e3c48cc46dd0d0c133d6d9e343827c9562fe53f4514b: Status 404 returned error can't find the container with id 4439f2d1ae933f57c229e3c48cc46dd0d0c133d6d9e343827c9562fe53f4514b Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.558162 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlf5m\" (UniqueName: \"kubernetes.io/projected/d532f776-38c6-40c1-a647-ab1ed0c588b9-kube-api-access-zlf5m\") pod \"package-server-manager-789f6589d5-tcqtn\" (UID: \"d532f776-38c6-40c1-a647-ab1ed0c588b9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.569648 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rgsc\" (UniqueName: \"kubernetes.io/projected/3d3033a8-cd36-4594-9c06-475b050e82f2-kube-api-access-6rgsc\") pod \"cluster-image-registry-operator-dc59b4c8b-jbf7s\" (UID: \"3d3033a8-cd36-4594-9c06-475b050e82f2\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.602251 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.603097 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.607615 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgpjn\" (UniqueName: 
\"kubernetes.io/projected/ca47c43c-9e61-4697-b7f5-7cec65e2c992-kube-api-access-lgpjn\") pod \"machine-api-operator-5694c8668f-9crzl\" (UID: \"ca47c43c-9e61-4697-b7f5-7cec65e2c992\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.613754 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.629178 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.629506 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.630482 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.634354 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.634628 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.134615551 +0000 UTC m=+136.212871152 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.637869 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.643222 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dwjg2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.643390 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.663556 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.679265 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.685944 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.692616 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:09 crc kubenswrapper[4813]: W1007 19:20:09.723904 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec7cf8ef_c153_48f7_ada6_e42400c33682.slice/crio-ecd1ae428913e45f979f4c840ea33821fca1af2aba5d0cb3fa87fb0af1f36ee4 WatchSource:0}: Error finding container ecd1ae428913e45f979f4c840ea33821fca1af2aba5d0cb3fa87fb0af1f36ee4: Status 404 returned error can't find the container with id ecd1ae428913e45f979f4c840ea33821fca1af2aba5d0cb3fa87fb0af1f36ee4 Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.735370 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.735694 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.235681102 +0000 UTC m=+136.313936713 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.791717 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.824629 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.836639 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.836977 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.336962889 +0000 UTC m=+136.415218500 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.864060 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.928097 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8st2v"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.930376 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.944827 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:09 crc kubenswrapper[4813]: E1007 19:20:09.945134 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.445117981 +0000 UTC m=+136.523373592 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.953443 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.963144 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.965725 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-zl7gl"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.970178 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-bl9th"] Oct 07 19:20:09 crc kubenswrapper[4813]: I1007 19:20:09.976372 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.047999 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.048430 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.548395504 +0000 UTC m=+136.626651115 (durationBeforeRetry 500ms). 
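
The repeated failures in this stretch of the log all share one cause: the kubelet cannot find kubevirt.io.hostpath-provisioner in its list of registered CSI drivers, so every MountDevice attempt for the image-registry PVC and every TearDown attempt for pod 8f668bae-612b-4b75-9490-919e737c6a3b is rejected and requeued half a second later (the "durationBeforeRetry 500ms" in each nestedpendingoperations error). The driver only becomes visible once the node plugin in hostpath-provisioner/csi-hostpathplugin-klc9x, whose sandbox is being created in this same window, registers itself with the kubelet. A minimal sketch of that interaction, in Go; the names (driverRegistry, lookup) and the 1200ms registration delay are invented for the example, this is not kubelet source:

package main

import (
	"fmt"
	"sync"
	"time"
)

// driverRegistry stands in for the kubelet-side list of registered CSI
// drivers; a real kubelet fills this in when a driver's node plugin
// announces itself over the plugin-registration socket.
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]struct{}
}

func (r *driverRegistry) register(name string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = struct{}{}
}

// lookup fails with the same shape of error that appears in the log while
// the driver has not registered yet.
func (r *driverRegistry) lookup(name string) error {
	r.mu.RLock()
	defer r.mu.RUnlock()
	if _, ok := r.drivers[name]; !ok {
		return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return nil
}

func main() {
	reg := &driverRegistry{drivers: map[string]struct{}{}}
	const driver = "kubevirt.io.hostpath-provisioner"

	// The driver pod comes up concurrently and registers a moment later,
	// as csi-hostpathplugin-klc9x is doing in this section of the log.
	go func() {
		time.Sleep(1200 * time.Millisecond)
		reg.register(driver)
	}()

	// Requeue loop: a failed attempt schedules the next one after a fixed
	// delay, mirroring "No retries permitted until ... (durationBeforeRetry 500ms)".
	const durationBeforeRetry = 500 * time.Millisecond
	for attempt := 1; ; attempt++ {
		if err := reg.lookup(driver); err != nil {
			fmt.Printf("attempt %d: %v; retrying in %s\n", attempt, err, durationBeforeRetry)
			time.Sleep(durationBeforeRetry)
			continue
		}
		fmt.Printf("attempt %d: MountDevice can proceed\n", attempt)
		return
	}
}

Under these assumptions the loop fails a few times and then proceeds, which is the same self-healing pattern this log implies: the errors are expected to stop on their own once the hostpath plugin finishes starting and registers.
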
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.080847 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod958766cf_ba8d_4342_a0c2_d8562d930f2e.slice/crio-feac8abc88945abef2e3007be2119f58ee6d88071cc0cd31f83976bcdca65e43 WatchSource:0}: Error finding container feac8abc88945abef2e3007be2119f58ee6d88071cc0cd31f83976bcdca65e43: Status 404 returned error can't find the container with id feac8abc88945abef2e3007be2119f58ee6d88071cc0cd31f83976bcdca65e43 Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.094308 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97e20cdc_f876_4512_b34e_d6aba0790163.slice/crio-de0d5dd51944c1ea898076221151419ff7758ca82230d8d0c93276520fa347d2 WatchSource:0}: Error finding container de0d5dd51944c1ea898076221151419ff7758ca82230d8d0c93276520fa347d2: Status 404 returned error can't find the container with id de0d5dd51944c1ea898076221151419ff7758ca82230d8d0c93276520fa347d2 Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.100426 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0e1732d_26b3_4869_ba17_730e794456c5.slice/crio-455315d824a9acebdca87b26ef537a1d093e85f6c17c87e53a1d1e662cc8cd9b WatchSource:0}: Error finding container 455315d824a9acebdca87b26ef537a1d093e85f6c17c87e53a1d1e662cc8cd9b: Status 404 returned error can't find the container with id 455315d824a9acebdca87b26ef537a1d093e85f6c17c87e53a1d1e662cc8cd9b Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.124221 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92a1d2ee_54f8_4317_9d9e_c05517cb3020.slice/crio-61b09cca462ddd46cdaa7a27716f961215826aa1d826756a52394b8453d6af74 WatchSource:0}: Error finding container 61b09cca462ddd46cdaa7a27716f961215826aa1d826756a52394b8453d6af74: Status 404 returned error can't find the container with id 61b09cca462ddd46cdaa7a27716f961215826aa1d826756a52394b8453d6af74 Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.128852 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23b2cd20_e7fe_476d_a2eb_05bbe7aa102d.slice/crio-6c6fa535812e7db5d89aa498e90e6130a39c653c00b1d992ff58e673d2084265 WatchSource:0}: Error finding container 6c6fa535812e7db5d89aa498e90e6130a39c653c00b1d992ff58e673d2084265: Status 404 returned error can't find the container with id 6c6fa535812e7db5d89aa498e90e6130a39c653c00b1d992ff58e673d2084265 Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.150306 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 
19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.150700 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.65068425 +0000 UTC m=+136.728939861 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.156998 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce72966f_f5d7_4257_983b_d630e5b91b63.slice/crio-310b13fb4397c58e7fe831c138b7e71d8fe56af323f110fe20ee9d40b85a6aea WatchSource:0}: Error finding container 310b13fb4397c58e7fe831c138b7e71d8fe56af323f110fe20ee9d40b85a6aea: Status 404 returned error can't find the container with id 310b13fb4397c58e7fe831c138b7e71d8fe56af323f110fe20ee9d40b85a6aea Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.189882 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rh8d8"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.201275 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x8dn8"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.224011 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.231708 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.251830 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.252136 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.752124992 +0000 UTC m=+136.830380603 (durationBeforeRetry 500ms). 
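
Interleaved with the mount retries are cAdvisor warnings (manager.go:1169) about watch events whose container cannot be resolved: the watcher sees a new crio-<id> cgroup appear under kubepods.slice, asks the runtime about it, and gets "Status 404 ... can't find the container with id", typically because the lookup races the container's creation or teardown. A rough sketch of why such a race is logged at W level and skipped rather than treated as fatal; the watchEvent type and processEvent handler below are invented for the example:

package main

import (
	"errors"
	"fmt"
	"os"
)

// watchEvent mirrors the {EventType, Name} pair printed in the warnings.
type watchEvent struct {
	EventType int
	Name      string // cgroup path, e.g. /kubepods.slice/.../crio-<id>
}

// processEvent tries to resolve the directory behind an event. A missing
// directory is reported to the caller, who treats it as a transient race
// rather than an error worth retrying.
func processEvent(ev watchEvent) error {
	if _, err := os.Stat(ev.Name); errors.Is(err, os.ErrNotExist) {
		return fmt.Errorf("can't find the container with id %s", ev.Name)
	} else if err != nil {
		return err
	}
	return nil
}

func main() {
	events := []watchEvent{
		{EventType: 0, Name: os.TempDir()},           // still exists: processed fine
		{EventType: 0, Name: "/kubepods.slice/gone"}, // already removed
	}
	for _, ev := range events {
		if err := processEvent(ev); err != nil {
			// Same shape as the kubelet warning: log and move on.
			fmt.Printf("W: Failed to process watch event %+v: %v\n", ev, err)
			continue
		}
		fmt.Printf("I: processed watch event for %s\n", ev.Name)
	}
}

Treating the missing container as transient is what keeps these W-level lines benign; the affected pods report ContainerStarted a few entries later (e.g. downloads-7954f5f757-5t4w8, whose 4439f2d1... container 404s at 19:20:09.557307 and appears as started at 19:20:10.598625).
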
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.352251 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.353111 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.85308678 +0000 UTC m=+136.931342391 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.357462 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hhlh8"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.369069 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-klc9x"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.385893 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.399783 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.399820 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.430104 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" event={"ID":"e1871c50-e321-48a0-a611-2b92072a18dd","Type":"ContainerStarted","Data":"6205c944b3679cdbe5c8493fc249ab447f8f73ad68335ea3a11bcf6146f7a28b"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.444059 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" event={"ID":"2e451e2e-d414-42ea-be0b-4035057c65a6","Type":"ContainerStarted","Data":"c0c41ceabacb05444e4c3a88b32532b11e75c40569edceba715d4a2654b61de7"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.455179 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.456881 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:10.956868728 +0000 UTC m=+137.035124339 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.498467 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93cc15e9_3ae8_49f7_a7bf_8a3b0f453ec2.slice/crio-1cf8210ec4831ba99d66607b4d02d6569b9e306d20620429c37663005a780170 WatchSource:0}: Error finding container 1cf8210ec4831ba99d66607b4d02d6569b9e306d20620429c37663005a780170: Status 404 returned error can't find the container with id 1cf8210ec4831ba99d66607b4d02d6569b9e306d20620429c37663005a780170 Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.534141 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-6xnmc"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.545163 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.553998 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" event={"ID":"2722f9b5-5590-4de0-8932-eb50ff14c085","Type":"ContainerStarted","Data":"79c6ab05af772ddeebf494aea259b697a5ccd9fbecff1f32de8a2143f1d3ae23"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.559738 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.560406 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-rp89d"] Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.560622 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.060595055 +0000 UTC m=+137.138850666 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.560795 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.561629 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.061616663 +0000 UTC m=+137.139872274 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.576271 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv" event={"ID":"3643284c-f3d9-4db6-9e59-360f3bb62051","Type":"ContainerStarted","Data":"73e729e722d2afd199ea88772403a53c52d4c4c6ab5d72535564857411e4b59a"} Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.576488 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea4c93eb_b5e7_46fc_9318_f78d1133145c.slice/crio-8cb4a31e698f585a20d1857d275a016379a816c06e12484ff2d912cf86cfafad WatchSource:0}: Error finding container 8cb4a31e698f585a20d1857d275a016379a816c06e12484ff2d912cf86cfafad: Status 404 returned error can't find the container with id 8cb4a31e698f585a20d1857d275a016379a816c06e12484ff2d912cf86cfafad Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.593832 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-glz8b"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.598625 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-5t4w8" event={"ID":"0c06d185-7b7a-448a-8b8b-dcd5a0560a20","Type":"ContainerStarted","Data":"4439f2d1ae933f57c229e3c48cc46dd0d0c133d6d9e343827c9562fe53f4514b"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.600901 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.613009 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 
19:20:10.613048 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-4tc9t" event={"ID":"202dcfb0-10ca-4f73-b5e1-97a33441c1ac","Type":"ContainerStarted","Data":"31864f0d379274190a6949547b478e7ad7bd5ee3ef9fc5c2bf384bf955e2e4c2"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.613065 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" event={"ID":"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d","Type":"ContainerStarted","Data":"ef343024713fd0376be5a5c912e680d796ebd2d9c19659e1b20256c7fcb51c38"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.613078 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nkdg4" event={"ID":"ce72966f-f5d7-4257-983b-d630e5b91b63","Type":"ContainerStarted","Data":"310b13fb4397c58e7fe831c138b7e71d8fe56af323f110fe20ee9d40b85a6aea"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.613087 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" event={"ID":"e0e1732d-26b3-4869-ba17-730e794456c5","Type":"ContainerStarted","Data":"455315d824a9acebdca87b26ef537a1d093e85f6c17c87e53a1d1e662cc8cd9b"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.614890 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" event={"ID":"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d","Type":"ContainerStarted","Data":"6c6fa535812e7db5d89aa498e90e6130a39c653c00b1d992ff58e673d2084265"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.615688 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" event={"ID":"ee7901fe-8ea0-4eea-8da1-689790a1dc16","Type":"ContainerStarted","Data":"fe3993a7a9c42adaea25e42cdcfd60f7143bd4add4fe367baa1d0a37174ba0ef"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.617395 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" event={"ID":"67482f17-74c7-49ee-87e2-19f400d5bc22","Type":"ContainerStarted","Data":"3bf22c01beba5fe22b7f2d5e4359b5b7e9b132ab8c9fc4c0c1070e5d93296b39"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.618019 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h" event={"ID":"bc7c11d5-f79f-4e42-b742-b6b81394aba1","Type":"ContainerStarted","Data":"1b01af04011148bae66f8d2ec516c85b5407ab637425ba9d4f0339d968b5aa3c"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.620889 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" event={"ID":"b2c17235-16ec-40f2-962a-e6f58a5746a6","Type":"ContainerStarted","Data":"e4ed74f68a77da4f6480f61c4d42a85704dc839aa0b25c8694ae3ef0b6042f28"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.623349 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g4swz"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.632627 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" event={"ID":"97e20cdc-f876-4512-b34e-d6aba0790163","Type":"ContainerStarted","Data":"de0d5dd51944c1ea898076221151419ff7758ca82230d8d0c93276520fa347d2"} Oct 07 19:20:10 
crc kubenswrapper[4813]: I1007 19:20:10.647391 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" event={"ID":"8d63f51c-2bd6-4eda-accb-b843de96a4c6","Type":"ContainerStarted","Data":"59d4d57823c41e9e8a31294f96384f2168042912661a858743ab6bd41f5541cc"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.648835 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-bl9th" event={"ID":"92a1d2ee-54f8-4317-9d9e-c05517cb3020","Type":"ContainerStarted","Data":"61b09cca462ddd46cdaa7a27716f961215826aa1d826756a52394b8453d6af74"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.649812 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-ddzzh" event={"ID":"ba658322-d68e-4312-8283-4da69865e460","Type":"ContainerStarted","Data":"6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.649828 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-ddzzh" event={"ID":"ba658322-d68e-4312-8283-4da69865e460","Type":"ContainerStarted","Data":"d139b91ad6ad2502f54ff8874ed5d7bfc9d31e91639762e5fcbf6c4da5b9de0d"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.651240 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" event={"ID":"958766cf-ba8d-4342-a0c2-d8562d930f2e","Type":"ContainerStarted","Data":"feac8abc88945abef2e3007be2119f58ee6d88071cc0cd31f83976bcdca65e43"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.656246 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" event={"ID":"71e41549-5fe8-4c2f-98ab-006af97e7b51","Type":"ContainerStarted","Data":"0f00033784738ffd4c3b123b82fb431ba23cf896ae36601357767af54999de37"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.657336 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" event={"ID":"acb83cfc-4277-4725-ace9-1469db07a8a5","Type":"ContainerStarted","Data":"aa692a35891ee0adc348d663424e3097e519d7ddeb1f6d8e7078a4e192a9cbdd"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.665168 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" event={"ID":"ec7cf8ef-c153-48f7-ada6-e42400c33682","Type":"ContainerStarted","Data":"ecd1ae428913e45f979f4c840ea33821fca1af2aba5d0cb3fa87fb0af1f36ee4"} Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.665705 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.665992 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.165978888 +0000 UTC m=+137.244234499 (durationBeforeRetry 500ms). 
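
With entries this dense, it helps to reduce the capture to its warning and error lines. One way to slice it, assuming only the journald-plus-klog line shape visible above ("MMM DD hh:mm:ss host unit[pid]: Lmmdd hh:mm:ss.micros threadid file:line] message"); wrapped continuation lines simply fail the match and are skipped:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// klogLine captures: timestamp, host, unit, pid, severity (I/W/E),
// klog timestamp, thread id, source file:line, and the message.
var klogLine = regexp.MustCompile(
	`^(\w{3} +\d+ \d{2}:\d{2}:\d{2}) (\S+) (\S+)\[(\d+)\]: ([IWE])(\d{4} \d{2}:\d{2}:\d{2}\.\d+) +(\d+) ([^ \]]+)\] (.*)$`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // entries here run to several KB
	for sc.Scan() {
		m := klogLine.FindStringSubmatch(sc.Text())
		if m == nil {
			continue // wrapped continuation or non-klog line
		}
		sev, src, msg := m[5], m[8], m[9]
		if sev == "E" || sev == "W" {
			fmt.Printf("%s %s %s: %s\n", m[1], sev, src, msg)
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}

Run as: go run filter.go < kubelet.log (filter.go being whatever you name the sketch). Against this section it would surface the nestedpendingoperations errors and the cAdvisor watch warnings while dropping the I-level mount and SyncLoop noise.
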
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.747726 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7tm9r" podStartSLOduration=116.747707913 podStartE2EDuration="1m56.747707913s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:10.747512398 +0000 UTC m=+136.825767999" watchObservedRunningTime="2025-10-07 19:20:10.747707913 +0000 UTC m=+136.825963524" Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.768871 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.26885623 +0000 UTC m=+137.347111841 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.768978 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.776771 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4"] Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.801990 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e4f2d5b_adfc_496b_9efe_89d540c1940e.slice/crio-6a3fe85db74d63286c222b33f4f8d2ef48744de4c4e31bb7eaaedd5ee072e462 WatchSource:0}: Error finding container 6a3fe85db74d63286c222b33f4f8d2ef48744de4c4e31bb7eaaedd5ee072e462: Status 404 returned error can't find the container with id 6a3fe85db74d63286c222b33f4f8d2ef48744de4c4e31bb7eaaedd5ee072e462 Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.807131 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb804415_f392_4f6b_bd38_5be5e5bec45b.slice/crio-0a41bbf354136133624978e5725b52bc2d52931ce27fb7306e4c35e62aeb33b8 WatchSource:0}: Error finding container 0a41bbf354136133624978e5725b52bc2d52931ce27fb7306e4c35e62aeb33b8: Status 404 returned error can't find the container with id 
0a41bbf354136133624978e5725b52bc2d52931ce27fb7306e4c35e62aeb33b8 Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.819061 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.852044 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.869710 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.869919 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.36989471 +0000 UTC m=+137.448150331 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: W1007 19:20:10.924454 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc472b688_36d7_420d_a4e3_dbd8c4e22714.slice/crio-9c9101b5eead10da39625b8d65a7f5127b4ba199fa9bc24c4bc164f52dc93470 WatchSource:0}: Error finding container 9c9101b5eead10da39625b8d65a7f5127b4ba199fa9bc24c4bc164f52dc93470: Status 404 returned error can't find the container with id 9c9101b5eead10da39625b8d65a7f5127b4ba199fa9bc24c4bc164f52dc93470 Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.932157 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.951851 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fbzgp"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.955569 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9crzl"] Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.974160 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:10 crc kubenswrapper[4813]: E1007 19:20:10.974509 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-07 19:20:11.474495581 +0000 UTC m=+137.552751192 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:10 crc kubenswrapper[4813]: I1007 19:20:10.984966 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dwjg2"] Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.075061 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.075750 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.575724307 +0000 UTC m=+137.653979958 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: W1007 19:20:11.087292 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96b602b1_36a6_4e6b_a585_e1dd6378a83c.slice/crio-4755a329f1a0be2ac844a37b1fc47a6cc7345191abbb2421681343f700e497c8 WatchSource:0}: Error finding container 4755a329f1a0be2ac844a37b1fc47a6cc7345191abbb2421681343f700e497c8: Status 404 returned error can't find the container with id 4755a329f1a0be2ac844a37b1fc47a6cc7345191abbb2421681343f700e497c8 Oct 07 19:20:11 crc kubenswrapper[4813]: W1007 19:20:11.088425 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd532f776_38c6_40c1_a647_ab1ed0c588b9.slice/crio-6648d0bd66bf863e91e3068e7ed4d94abb868e8ac7f5ac99e547a53870be248d WatchSource:0}: Error finding container 6648d0bd66bf863e91e3068e7ed4d94abb868e8ac7f5ac99e547a53870be248d: Status 404 returned error can't find the container with id 6648d0bd66bf863e91e3068e7ed4d94abb868e8ac7f5ac99e547a53870be248d Oct 07 19:20:11 crc kubenswrapper[4813]: W1007 19:20:11.136024 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d3033a8_cd36_4594_9c06_475b050e82f2.slice/crio-ba41413adf311a2ee9fa402a8ff565d4e1761ce1c3497e02bdd1602a69c0317e WatchSource:0}: Error finding container ba41413adf311a2ee9fa402a8ff565d4e1761ce1c3497e02bdd1602a69c0317e: Status 404 returned error can't find the container with id 
ba41413adf311a2ee9fa402a8ff565d4e1761ce1c3497e02bdd1602a69c0317e Oct 07 19:20:11 crc kubenswrapper[4813]: W1007 19:20:11.159604 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca47c43c_9e61_4697_b7f5_7cec65e2c992.slice/crio-1a12a8abeb7aacce3a1df118ef713494d86700b8173114488a7cb277bdfc1737 WatchSource:0}: Error finding container 1a12a8abeb7aacce3a1df118ef713494d86700b8173114488a7cb277bdfc1737: Status 404 returned error can't find the container with id 1a12a8abeb7aacce3a1df118ef713494d86700b8173114488a7cb277bdfc1737 Oct 07 19:20:11 crc kubenswrapper[4813]: W1007 19:20:11.168646 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod971be5ec_c446_4d3b_bdd3_5ebf739996cc.slice/crio-29442b0232df9355abde2cf9a64cf2d81db532775c230918051ce0751773bef0 WatchSource:0}: Error finding container 29442b0232df9355abde2cf9a64cf2d81db532775c230918051ce0751773bef0: Status 404 returned error can't find the container with id 29442b0232df9355abde2cf9a64cf2d81db532775c230918051ce0751773bef0 Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.177304 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.177849 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.677829328 +0000 UTC m=+137.756084949 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.279435 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.279902 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.779760144 +0000 UTC m=+137.858015775 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.279983 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.280430 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.780419642 +0000 UTC m=+137.858675273 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.309490 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-ddzzh" podStartSLOduration=117.309474932 podStartE2EDuration="1m57.309474932s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:11.308286778 +0000 UTC m=+137.386542409" watchObservedRunningTime="2025-10-07 19:20:11.309474932 +0000 UTC m=+137.387730543" Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.381124 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.381577 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.881559826 +0000 UTC m=+137.959815447 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.482377 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.482821 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:11.982807422 +0000 UTC m=+138.061063043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.583408 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.583626 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.083605396 +0000 UTC m=+138.161861017 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.583727 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.584025 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.084015747 +0000 UTC m=+138.162271358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.669652 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" event={"ID":"f4cef620-3f83-48c3-9894-ddef3458cfb5","Type":"ContainerStarted","Data":"306cf5493be259683fa038a5e14f96efca5f1d61b8c5eb3f2a8d5d384febdc75"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.670534 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" event={"ID":"99a37f2e-fef1-47f1-ac60-6504a968ebf8","Type":"ContainerStarted","Data":"6c62529ec0700671fcf27182e8ffd9b6c461b0160a1b80766665373d6504602f"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.671297 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" event={"ID":"971be5ec-c446-4d3b-bdd3-5ebf739996cc","Type":"ContainerStarted","Data":"29442b0232df9355abde2cf9a64cf2d81db532775c230918051ce0751773bef0"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.673694 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" event={"ID":"96b602b1-36a6-4e6b-a585-e1dd6378a83c","Type":"ContainerStarted","Data":"4755a329f1a0be2ac844a37b1fc47a6cc7345191abbb2421681343f700e497c8"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.674987 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" event={"ID":"ce999cda-8f25-4691-ac6d-2caa93e8b235","Type":"ContainerStarted","Data":"c7707fc446d0963723b1833b12db945a6d07ba49f79ae77912e17a3ab95e5393"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.676960 4813 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" event={"ID":"8d63f51c-2bd6-4eda-accb-b843de96a4c6","Type":"ContainerStarted","Data":"e817007b81685d958fa4506ae6d6c0fc5ff0e34685181cfb1196782b63055df3"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.678419 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" event={"ID":"71e41549-5fe8-4c2f-98ab-006af97e7b51","Type":"ContainerStarted","Data":"c2414840e4b75ed1be524036a277aed6c28bf5ceadcc54cec1a6afdce29a8161"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.679493 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" event={"ID":"ea4c93eb-b5e7-46fc-9318-f78d1133145c","Type":"ContainerStarted","Data":"8cb4a31e698f585a20d1857d275a016379a816c06e12484ff2d912cf86cfafad"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.680870 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" event={"ID":"e0c35bd9-fbb2-448c-b1c0-f034529f75c8","Type":"ContainerStarted","Data":"322d5d73a86e453c7601dce46568bef9ce9b760b2073b95df73e0c37c857e493"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.682173 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" event={"ID":"2e4f2d5b-adfc-496b-9efe-89d540c1940e","Type":"ContainerStarted","Data":"6a3fe85db74d63286c222b33f4f8d2ef48744de4c4e31bb7eaaedd5ee072e462"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.683244 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" event={"ID":"ec7cf8ef-c153-48f7-ada6-e42400c33682","Type":"ContainerStarted","Data":"068ae801e0eb21d62802c5b2cc5b6baf5f93364db10ac605c11395737e110253"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.684150 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" event={"ID":"c472b688-36d7-420d-a4e3-dbd8c4e22714","Type":"ContainerStarted","Data":"9c9101b5eead10da39625b8d65a7f5127b4ba199fa9bc24c4bc164f52dc93470"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.684358 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.684469 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.184445431 +0000 UTC m=+138.262701042 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.684745 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.685033 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.185025067 +0000 UTC m=+138.263280678 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.685477 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" event={"ID":"127b60e9-ca16-4e5c-bd69-6bd6f96625ed","Type":"ContainerStarted","Data":"403634a34c69b1122afa276dad21fadb90aab41481c3235e8a49a693c55388b1"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.686654 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" event={"ID":"48819027-dd10-43a8-b2f9-18bbefcc9451","Type":"ContainerStarted","Data":"807b2342038b4bb72372fa5100dbba31a0f28c0b83283735e20a9b9ed840b6be"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.687711 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-klc9x" event={"ID":"6cdbb125-df14-4347-b188-dc29bd210459","Type":"ContainerStarted","Data":"9396a65365798be1fee63d0154eec3bbda0cbf2c0aa1d513390c74eaa36e5b29"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.688582 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" event={"ID":"8ac797b3-b22d-4c0a-9d08-733d851ad9f2","Type":"ContainerStarted","Data":"45b1e88254ef65963ad74c2b8995609f3d59af429f588b5c24d774909354118f"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.689692 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv" event={"ID":"3643284c-f3d9-4db6-9e59-360f3bb62051","Type":"ContainerStarted","Data":"349214b01c55407fe4578422a5c2d97f852c2771dfb4559bc2bd8f96fbc9949f"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.690675 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns/dns-default-rp89d" event={"ID":"db804415-f392-4f6b-bd38-5be5e5bec45b","Type":"ContainerStarted","Data":"0a41bbf354136133624978e5725b52bc2d52931ce27fb7306e4c35e62aeb33b8"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.691536 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-6xnmc" event={"ID":"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804","Type":"ContainerStarted","Data":"db5014318b0dfec9a623c28ababed5da4d5ca8bfecd5d507685ee88c68ef0569"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.692515 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" event={"ID":"d532f776-38c6-40c1-a647-ab1ed0c588b9","Type":"ContainerStarted","Data":"6648d0bd66bf863e91e3068e7ed4d94abb868e8ac7f5ac99e547a53870be248d"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.693529 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" event={"ID":"93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2","Type":"ContainerStarted","Data":"1cf8210ec4831ba99d66607b4d02d6569b9e306d20620429c37663005a780170"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.694841 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-4tc9t" event={"ID":"202dcfb0-10ca-4f73-b5e1-97a33441c1ac","Type":"ContainerStarted","Data":"8a7ce39cd32e9424f19334666a93465fb2bf82e221613acc0701315d1931f06a"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.696966 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-5t4w8" event={"ID":"0c06d185-7b7a-448a-8b8b-dcd5a0560a20","Type":"ContainerStarted","Data":"0e7ce4c9acfdd3677cb842e22f61ee8b315699db4dfd47a6900577be1bbf75ea"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.696999 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.698198 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" event={"ID":"3d3033a8-cd36-4594-9c06-475b050e82f2","Type":"ContainerStarted","Data":"ba41413adf311a2ee9fa402a8ff565d4e1761ce1c3497e02bdd1602a69c0317e"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.698367 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.698425 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.699686 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" event={"ID":"ca47c43c-9e61-4697-b7f5-7cec65e2c992","Type":"ContainerStarted","Data":"1a12a8abeb7aacce3a1df118ef713494d86700b8173114488a7cb277bdfc1737"} Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.722658 4813 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-machine-config-operator/machine-config-server-4tc9t" podStartSLOduration=5.722639968 podStartE2EDuration="5.722639968s" podCreationTimestamp="2025-10-07 19:20:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:11.70888026 +0000 UTC m=+137.787135871" watchObservedRunningTime="2025-10-07 19:20:11.722639968 +0000 UTC m=+137.800895589" Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.723772 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-5t4w8" podStartSLOduration=117.72376494 podStartE2EDuration="1m57.72376494s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:11.72233802 +0000 UTC m=+137.800593641" watchObservedRunningTime="2025-10-07 19:20:11.72376494 +0000 UTC m=+137.802020551" Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.786024 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.786225 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.286199351 +0000 UTC m=+138.364454982 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.786669 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.787922 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.287901959 +0000 UTC m=+138.366157680 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.887976 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.888530 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.388497077 +0000 UTC m=+138.466752728 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.888749 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.889259 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.389244368 +0000 UTC m=+138.467500009 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:11 crc kubenswrapper[4813]: I1007 19:20:11.990352 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:11 crc kubenswrapper[4813]: E1007 19:20:11.990954 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.490927017 +0000 UTC m=+138.569182668 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.091663 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.092221 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.592207004 +0000 UTC m=+138.670462625 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.193420 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.193610 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.693580744 +0000 UTC m=+138.771836355 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.193786 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.194162 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.69414695 +0000 UTC m=+138.772402571 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.294471 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.294899 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.794874882 +0000 UTC m=+138.873130503 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.396063 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.398227 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.898215617 +0000 UTC m=+138.976471228 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.499497 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.499669 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:12.999649059 +0000 UTC m=+139.077904670 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.499915 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.500241 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.000226345 +0000 UTC m=+139.078481966 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.602724 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.603018 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.102996645 +0000 UTC m=+139.181252266 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.603090 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.603547 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.10353768 +0000 UTC m=+139.181793291 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.704313 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.704897 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.204855947 +0000 UTC m=+139.283111558 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.705563 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.705924 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.205909297 +0000 UTC m=+139.284164908 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.722122 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" event={"ID":"97e20cdc-f876-4512-b34e-d6aba0790163","Type":"ContainerStarted","Data":"a841d8a005344e605bc72c7de037035140a5ff3d2f6a32e8b1c0a5b93a6f5268"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.734824 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nkdg4" event={"ID":"ce72966f-f5d7-4257-983b-d630e5b91b63","Type":"ContainerStarted","Data":"d2f7bb34e07185c1004306448915e50455977930a21e6b63082f961630e207fb"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.745632 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-bl9th" event={"ID":"92a1d2ee-54f8-4317-9d9e-c05517cb3020","Type":"ContainerStarted","Data":"b9d3cfa3e454dcea31baadbb1bad1297c4c98e34401f37d410ca3a489fdea1a2"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.746437 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.750011 4813 patch_prober.go:28] interesting pod/console-operator-58897d9998-bl9th container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.750069 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-bl9th" podUID="92a1d2ee-54f8-4317-9d9e-c05517cb3020" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.756673 4813 generic.go:334] "Generic (PLEG): container finished" podID="67482f17-74c7-49ee-87e2-19f400d5bc22" containerID="c6a61d519aa03234326df032c5ffba338908803c43413e102cffedcc844d0a50" exitCode=0 Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.756777 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" event={"ID":"67482f17-74c7-49ee-87e2-19f400d5bc22","Type":"ContainerDied","Data":"c6a61d519aa03234326df032c5ffba338908803c43413e102cffedcc844d0a50"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.763606 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" event={"ID":"b2c17235-16ec-40f2-962a-e6f58a5746a6","Type":"ContainerStarted","Data":"d6f5ea7278371c02867cb10cfad748153058fbfe9c6ad8b3cd1a998b70b44e82"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.798494 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-console-operator/console-operator-58897d9998-bl9th" podStartSLOduration=118.798461028 podStartE2EDuration="1m58.798461028s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.792005776 +0000 UTC m=+138.870261397" watchObservedRunningTime="2025-10-07 19:20:12.798461028 +0000 UTC m=+138.876716639" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.798625 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-nkdg4" podStartSLOduration=117.798619863 podStartE2EDuration="1m57.798619863s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.776361315 +0000 UTC m=+138.854616926" watchObservedRunningTime="2025-10-07 19:20:12.798619863 +0000 UTC m=+138.876875474" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.813778 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.815268 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.315246832 +0000 UTC m=+139.393502443 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.830362 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" event={"ID":"958766cf-ba8d-4342-a0c2-d8562d930f2e","Type":"ContainerStarted","Data":"34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.831163 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.832531 4813 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-lq4gb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.832565 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" podUID="958766cf-ba8d-4342-a0c2-d8562d930f2e" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.836362 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" event={"ID":"2e451e2e-d414-42ea-be0b-4035057c65a6","Type":"ContainerStarted","Data":"04629f2c658c352406ea520c6c83966dd8561b0614e0b479277da980cdaa31e7"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.840912 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" event={"ID":"99a37f2e-fef1-47f1-ac60-6504a968ebf8","Type":"ContainerStarted","Data":"2eab444bdfada98cb65c27936ef25746a95b220af4227fde68f542eb8b06e3ca"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.843377 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" event={"ID":"2722f9b5-5590-4de0-8932-eb50ff14c085","Type":"ContainerStarted","Data":"471e0bdaa0be7ac14578b8dfa9babad21cd88196039d0834fde1b07737df449b"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.846108 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.851401 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" podStartSLOduration=117.851385571 podStartE2EDuration="1m57.851385571s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.848884111 
+0000 UTC m=+138.927139712" watchObservedRunningTime="2025-10-07 19:20:12.851385571 +0000 UTC m=+138.929641182" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.852717 4813 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-bfr95 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.852749 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" podUID="2722f9b5-5590-4de0-8932-eb50ff14c085" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.862447 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" event={"ID":"acb83cfc-4277-4725-ace9-1469db07a8a5","Type":"ContainerStarted","Data":"bbb9a4f4a0216908aae60e888340f195f285f746765acab46644cd4225e84ccc"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.873810 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv" event={"ID":"3643284c-f3d9-4db6-9e59-360f3bb62051","Type":"ContainerStarted","Data":"060a0efb7a3a78dd23bdf294a1d0bdff3d4573db648109f17ff5853430a98250"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.892796 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" podStartSLOduration=117.892777659 podStartE2EDuration="1m57.892777659s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.875030628 +0000 UTC m=+138.953286239" watchObservedRunningTime="2025-10-07 19:20:12.892777659 +0000 UTC m=+138.971033270" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.898511 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" event={"ID":"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d","Type":"ContainerStarted","Data":"ecc0a85f16374bd3f9c5924609c296fcd0bf569b5157c56877d1bda07972125e"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.900198 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h" event={"ID":"bc7c11d5-f79f-4e42-b742-b6b81394aba1","Type":"ContainerStarted","Data":"623cbd6f7ca62dcccf976c8407467b9b7e01d69142807c100b42a86cac1370ea"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.901206 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" event={"ID":"23b2cd20-e7fe-476d-a2eb-05bbe7aa102d","Type":"ContainerStarted","Data":"838a5fca1970c9ae4771b6be52c3f53521d53ad2ff2452be5c8d3333b9ddc559"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.912741 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" event={"ID":"e0e1732d-26b3-4869-ba17-730e794456c5","Type":"ContainerStarted","Data":"23bcf4592f15b2781ec2518e5c2210ad4837237ab9afbf9144a272963d850f6e"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 
19:20:12.915825 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.915884 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" event={"ID":"d532f776-38c6-40c1-a647-ab1ed0c588b9","Type":"ContainerStarted","Data":"3356f13bf42a613e9a4d58cad7729c6e5804a34af2d80dcd25a04b3e08e2832c"} Oct 07 19:20:12 crc kubenswrapper[4813]: E1007 19:20:12.917552 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.417520287 +0000 UTC m=+139.495776018 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.920743 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-zl7gl" podStartSLOduration=118.920728877 podStartE2EDuration="1m58.920728877s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.917553368 +0000 UTC m=+138.995808979" watchObservedRunningTime="2025-10-07 19:20:12.920728877 +0000 UTC m=+138.998984488" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.920829 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6lddv" podStartSLOduration=117.9208251 podStartE2EDuration="1m57.9208251s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.893562061 +0000 UTC m=+138.971817672" watchObservedRunningTime="2025-10-07 19:20:12.9208251 +0000 UTC m=+138.999080701" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.927490 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" event={"ID":"127b60e9-ca16-4e5c-bd69-6bd6f96625ed","Type":"ContainerStarted","Data":"6d5d75e7cd0cae9c09e0df2cc33c2c95409e13fa5005e5474c1c98c932700659"} Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.929006 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.929040 4813 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.947138 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-698wl" podStartSLOduration=117.947120512 podStartE2EDuration="1m57.947120512s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.941577096 +0000 UTC m=+139.019832697" watchObservedRunningTime="2025-10-07 19:20:12.947120512 +0000 UTC m=+139.025376123" Oct 07 19:20:12 crc kubenswrapper[4813]: I1007 19:20:12.999125 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kdb6c" podStartSLOduration=117.999100138 podStartE2EDuration="1m57.999100138s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.9896026 +0000 UTC m=+139.067858211" watchObservedRunningTime="2025-10-07 19:20:12.999100138 +0000 UTC m=+139.077355779" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.001411 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jwj5" podStartSLOduration=118.001395543 podStartE2EDuration="1m58.001395543s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:12.961454646 +0000 UTC m=+139.039710257" watchObservedRunningTime="2025-10-07 19:20:13.001395543 +0000 UTC m=+139.079651154" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.013145 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s2hqz" podStartSLOduration=118.013111244 podStartE2EDuration="1m58.013111244s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:13.011771606 +0000 UTC m=+139.090027217" watchObservedRunningTime="2025-10-07 19:20:13.013111244 +0000 UTC m=+139.091366855" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.016895 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.034760 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.534729094 +0000 UTC m=+139.612984705 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.060467 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-9485s" podStartSLOduration=118.060443739 podStartE2EDuration="1m58.060443739s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:13.048146362 +0000 UTC m=+139.126401973" watchObservedRunningTime="2025-10-07 19:20:13.060443739 +0000 UTC m=+139.138699350" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.135161 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.137027 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.637006089 +0000 UTC m=+139.715261790 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.236285 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.237000 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.73698556 +0000 UTC m=+139.815241171 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.338136 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.338448 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.838435892 +0000 UTC m=+139.916691503 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.440978 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.452975 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:13.952956253 +0000 UTC m=+140.031211854 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.544508 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.545164 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.045152934 +0000 UTC m=+140.123408535 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.646504 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.646688 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.146663388 +0000 UTC m=+140.224918999 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.646758 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.647068 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.147060659 +0000 UTC m=+140.225316380 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.664732 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.666465 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.666508 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.749830 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.750163 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.250146197 +0000 UTC m=+140.328401808 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.750210 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.750589 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.250579759 +0000 UTC m=+140.328835370 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.851674 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.852365 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.35234455 +0000 UTC m=+140.430600161 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.932517 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" event={"ID":"ce999cda-8f25-4691-ac6d-2caa93e8b235","Type":"ContainerStarted","Data":"9edd297fa445c73e1ca57209579ac68e1e14daa86dd7ddc5c9a4e3c10b57adc8"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.934081 4813 generic.go:334] "Generic (PLEG): container finished" podID="ee7901fe-8ea0-4eea-8da1-689790a1dc16" containerID="0e025c3728eaf02c8967bcc33c78d2c15395a81c4776919aa362b3012ff4b483" exitCode=0 Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.934130 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" event={"ID":"ee7901fe-8ea0-4eea-8da1-689790a1dc16","Type":"ContainerDied","Data":"0e025c3728eaf02c8967bcc33c78d2c15395a81c4776919aa362b3012ff4b483"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.939567 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" event={"ID":"f4cef620-3f83-48c3-9894-ddef3458cfb5","Type":"ContainerStarted","Data":"6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.939847 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.941306 4813 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-g4swz container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.33:6443/healthz\": dial tcp 10.217.0.33:6443: connect: connection refused" start-of-body= Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.941369 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" podUID="f4cef620-3f83-48c3-9894-ddef3458cfb5" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.33:6443/healthz\": dial tcp 10.217.0.33:6443: connect: connection refused" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.943700 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" event={"ID":"ca47c43c-9e61-4697-b7f5-7cec65e2c992","Type":"ContainerStarted","Data":"9d540134f4a8c9956bbd53716ef8171e9ab4bf152b6545acd47a84d91686786e"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.943746 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" event={"ID":"ca47c43c-9e61-4697-b7f5-7cec65e2c992","Type":"ContainerStarted","Data":"3cc283d3bb85edf5707ec4be5d66b57a62cb1dbdc28420ff6954c6b0e22606f4"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.944866 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" 
event={"ID":"e0c35bd9-fbb2-448c-b1c0-f034529f75c8","Type":"ContainerStarted","Data":"b5d9c158afa76ba03a25f5c036a5a875f33dc1f0577dfe4a8e6446f1451ec7b2"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.945584 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.946814 4813 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-d7w7r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" start-of-body= Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.946855 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" podUID="e0c35bd9-fbb2-448c-b1c0-f034529f75c8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.947701 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" event={"ID":"3d3033a8-cd36-4594-9c06-475b050e82f2","Type":"ContainerStarted","Data":"ae65e795b6f929aa882f3ad5fd6d804d0a78743b997dbac2e84f7261e2ed409a"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.949267 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" event={"ID":"48819027-dd10-43a8-b2f9-18bbefcc9451","Type":"ContainerStarted","Data":"cac4c948f5eec78fb61abb2011836e168d6cb43888db6864d46923b7b6a36cfe"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.950025 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.951113 4813 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hhlh8 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.951149 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" podUID="48819027-dd10-43a8-b2f9-18bbefcc9451" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.952875 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.953708 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-6xnmc" event={"ID":"09d10b11-4fb6-4e1e-8ca2-4bad1f86d804","Type":"ContainerStarted","Data":"8e9cc85b97a8f9b0fbf8a629384d7c9dedc37f7afba912b20e3456380f13d635"} Oct 07 19:20:13 crc 
kubenswrapper[4813]: I1007 19:20:13.955254 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" event={"ID":"d532f776-38c6-40c1-a647-ab1ed0c588b9","Type":"ContainerStarted","Data":"4cecc99d8c404d0eb384e9c9e70dd9ab3465e13f59f8da9f3f580971690c58c0"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.955478 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:13 crc kubenswrapper[4813]: E1007 19:20:13.955859 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.45584223 +0000 UTC m=+140.534097841 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.955689 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4vmm4" podStartSLOduration=118.955676275 podStartE2EDuration="1m58.955676275s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:13.951274461 +0000 UTC m=+140.029530072" watchObservedRunningTime="2025-10-07 19:20:13.955676275 +0000 UTC m=+140.033931886" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.956531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" event={"ID":"ea4c93eb-b5e7-46fc-9318-f78d1133145c","Type":"ContainerStarted","Data":"fbdc12ee9217f85c24735984edc5480643a076b33ac5d96402c0bd89ba511a91"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.958008 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h" event={"ID":"bc7c11d5-f79f-4e42-b742-b6b81394aba1","Type":"ContainerStarted","Data":"1c1620034b3e566ab5f4723fe8c5b525c546288d04f5d0b4f06022658d56dbe5"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.964634 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" event={"ID":"7079acca-b3b9-4b09-83d1-28dfaaaf3f5d","Type":"ContainerStarted","Data":"5db0a2d686dbe6cbcdc5b7487db0eed7c379d9f73c4070d131516a3cdde7bac1"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.966770 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" event={"ID":"971be5ec-c446-4d3b-bdd3-5ebf739996cc","Type":"ContainerStarted","Data":"ca1c172d445f89ffa3236730b6c799f77a2368a0db45b160fb6fe8b3f85b9455"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.966816 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" event={"ID":"971be5ec-c446-4d3b-bdd3-5ebf739996cc","Type":"ContainerStarted","Data":"2f1ce461a5c1fe062d8a6b774d0864ed2f91f496ee6de35e848b99be76f6c080"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.968401 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" event={"ID":"96b602b1-36a6-4e6b-a585-e1dd6378a83c","Type":"ContainerStarted","Data":"5c233c189b16292d488a20ac8c918f65e1a69d87ae90149ce151165068ffb51e"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.968439 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" event={"ID":"96b602b1-36a6-4e6b-a585-e1dd6378a83c","Type":"ContainerStarted","Data":"00d5339e9f36f9642f1e9f614a6735a15896537554fd557cc51cfd3d67a05605"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.970696 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-rp89d" event={"ID":"db804415-f392-4f6b-bd38-5be5e5bec45b","Type":"ContainerStarted","Data":"8e82cae9d0af9a2c8fa7e28805e426d1659cdddc086af24340dade2d67a026eb"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.970732 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-rp89d" event={"ID":"db804415-f392-4f6b-bd38-5be5e5bec45b","Type":"ContainerStarted","Data":"08332b6bca901df7556d388b561508b79c988c03bb720c700ba3a803aa8194ea"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.971276 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.973421 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" event={"ID":"2e4f2d5b-adfc-496b-9efe-89d540c1940e","Type":"ContainerStarted","Data":"8b6c9e4844a7e37df5610b9cbe2411f8ba6391848092b1f41935975dcae7a0ad"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.973712 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" event={"ID":"2e4f2d5b-adfc-496b-9efe-89d540c1940e","Type":"ContainerStarted","Data":"da4c8228fc7cedab56bc9a7131b6acc92cb8c4f5004fab0fa22b77b3a7049b6d"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.975450 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" event={"ID":"c472b688-36d7-420d-a4e3-dbd8c4e22714","Type":"ContainerStarted","Data":"979279f9d1c96061c3858195f75055d9056fcf72de71e1f95aa4c3ec6c0e6dd8"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.985567 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" event={"ID":"8ac797b3-b22d-4c0a-9d08-733d851ad9f2","Type":"ContainerStarted","Data":"f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.986315 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.992603 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" 
event={"ID":"93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2","Type":"ContainerStarted","Data":"337ab4c8c0b76c3bc280221dfa92fd2b7e823b0178b795f23366217e4db00ee1"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.996070 4813 generic.go:334] "Generic (PLEG): container finished" podID="97e20cdc-f876-4512-b34e-d6aba0790163" containerID="a841d8a005344e605bc72c7de037035140a5ff3d2f6a32e8b1c0a5b93a6f5268" exitCode=0 Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.996850 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" event={"ID":"97e20cdc-f876-4512-b34e-d6aba0790163","Type":"ContainerDied","Data":"a841d8a005344e605bc72c7de037035140a5ff3d2f6a32e8b1c0a5b93a6f5268"} Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.998533 4813 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-bfr95 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.998591 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" podUID="2722f9b5-5590-4de0-8932-eb50ff14c085" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.999852 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:13 crc kubenswrapper[4813]: I1007 19:20:13.999950 4813 patch_prober.go:28] interesting pod/console-operator-58897d9998-bl9th container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:13.993246 4813 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dwjg2 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.000130 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" podUID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.000213 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-bl9th" podUID="92a1d2ee-54f8-4317-9d9e-c05517cb3020" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.007450 4813 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-sbmzw container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection 
refused" start-of-body= Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.007501 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" podUID="2e451e2e-d414-42ea-be0b-4035057c65a6" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.007550 4813 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-lq4gb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.007590 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" podUID="958766cf-ba8d-4342-a0c2-d8562d930f2e" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.024517 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-9crzl" podStartSLOduration=119.024502187 podStartE2EDuration="1m59.024502187s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.023629173 +0000 UTC m=+140.101884784" watchObservedRunningTime="2025-10-07 19:20:14.024502187 +0000 UTC m=+140.102757798" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.054700 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.056224 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.556191841 +0000 UTC m=+140.634447452 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.066268 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" podStartSLOduration=119.066252895 podStartE2EDuration="1m59.066252895s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.065402351 +0000 UTC m=+140.143657962" watchObservedRunningTime="2025-10-07 19:20:14.066252895 +0000 UTC m=+140.144508506" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.158755 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.165533 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.665519816 +0000 UTC m=+140.743775427 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.202440 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" podStartSLOduration=119.202425677 podStartE2EDuration="1m59.202425677s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.183182054 +0000 UTC m=+140.261437665" watchObservedRunningTime="2025-10-07 19:20:14.202425677 +0000 UTC m=+140.280681288" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.204679 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" podStartSLOduration=119.20466702 podStartE2EDuration="1m59.20466702s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.098918457 +0000 UTC m=+140.177174068" watchObservedRunningTime="2025-10-07 19:20:14.20466702 +0000 UTC m=+140.282922631" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.259836 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.260296 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.760272769 +0000 UTC m=+140.838528380 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.261002 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.261310 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.761299508 +0000 UTC m=+140.839555129 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.327172 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jbf7s" podStartSLOduration=119.327152665 podStartE2EDuration="1m59.327152665s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.268405628 +0000 UTC m=+140.346661249" watchObservedRunningTime="2025-10-07 19:20:14.327152665 +0000 UTC m=+140.405408276" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.329016 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-x8dn8" podStartSLOduration=119.329002208 podStartE2EDuration="1m59.329002208s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.325062586 +0000 UTC m=+140.403318207" watchObservedRunningTime="2025-10-07 19:20:14.329002208 +0000 UTC m=+140.407257819" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.376732 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.376879 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.876852048 +0000 UTC m=+140.955107659 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.377270 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.377559 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.877550917 +0000 UTC m=+140.955806528 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.396590 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" podStartSLOduration=120.396573204 podStartE2EDuration="2m0.396573204s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.395986457 +0000 UTC m=+140.474242068" watchObservedRunningTime="2025-10-07 19:20:14.396573204 +0000 UTC m=+140.474828815" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.409027 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b6gb7" podStartSLOduration=119.409011745 podStartE2EDuration="1m59.409011745s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.408376817 +0000 UTC m=+140.486632428" watchObservedRunningTime="2025-10-07 19:20:14.409011745 +0000 UTC m=+140.487267356" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.434509 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cpgrb" podStartSLOduration=119.434493554 podStartE2EDuration="1m59.434493554s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.432810356 +0000 UTC m=+140.511065967" watchObservedRunningTime="2025-10-07 19:20:14.434493554 +0000 UTC m=+140.512749165" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.478514 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.478832 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:14.978818144 +0000 UTC m=+141.057073755 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.485186 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" podStartSLOduration=119.485172253 podStartE2EDuration="1m59.485172253s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.46412504 +0000 UTC m=+140.542380661" watchObservedRunningTime="2025-10-07 19:20:14.485172253 +0000 UTC m=+140.563427864" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.485277 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-glz8b" podStartSLOduration=119.485273066 podStartE2EDuration="1m59.485273066s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.484297739 +0000 UTC m=+140.562553350" watchObservedRunningTime="2025-10-07 19:20:14.485273066 +0000 UTC m=+140.563528677" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.556593 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" podStartSLOduration=120.556577218 podStartE2EDuration="2m0.556577218s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.555142887 +0000 UTC m=+140.633398508" watchObservedRunningTime="2025-10-07 19:20:14.556577218 +0000 UTC m=+140.634832829" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.579660 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.579928 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.079916966 +0000 UTC m=+141.158172577 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.588552 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w2c5h" podStartSLOduration=120.58853569 podStartE2EDuration="2m0.58853569s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.587575422 +0000 UTC m=+140.665831033" watchObservedRunningTime="2025-10-07 19:20:14.58853569 +0000 UTC m=+140.666791301" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.634098 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f2ldv" podStartSLOduration=120.634079064 podStartE2EDuration="2m0.634079064s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.611194129 +0000 UTC m=+140.689449740" watchObservedRunningTime="2025-10-07 19:20:14.634079064 +0000 UTC m=+140.712334675" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.657266 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" podStartSLOduration=119.657250148 podStartE2EDuration="1m59.657250148s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.634569428 +0000 UTC m=+140.712825039" watchObservedRunningTime="2025-10-07 19:20:14.657250148 +0000 UTC m=+140.735505749" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.665281 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.665338 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="Get 
\"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.682887 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-kdlqd" podStartSLOduration=120.682870241 podStartE2EDuration="2m0.682870241s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.659171652 +0000 UTC m=+140.737427263" watchObservedRunningTime="2025-10-07 19:20:14.682870241 +0000 UTC m=+140.761125862" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.685422 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.685828 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.185810274 +0000 UTC m=+141.264065875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.713190 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-b6pcc" podStartSLOduration=119.713171616 podStartE2EDuration="1m59.713171616s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.684465496 +0000 UTC m=+140.762721107" watchObservedRunningTime="2025-10-07 19:20:14.713171616 +0000 UTC m=+140.791427227" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.751831 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-mxcgt" podStartSLOduration=119.751812296 podStartE2EDuration="1m59.751812296s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.720140522 +0000 UTC m=+140.798396133" watchObservedRunningTime="2025-10-07 19:20:14.751812296 +0000 UTC m=+140.830067907" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.754600 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-6xnmc" podStartSLOduration=8.754591604 podStartE2EDuration="8.754591604s" podCreationTimestamp="2025-10-07 19:20:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-07 19:20:14.751027774 +0000 UTC m=+140.829283385" watchObservedRunningTime="2025-10-07 19:20:14.754591604 +0000 UTC m=+140.832847215" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.779167 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-rp89d" podStartSLOduration=8.779152357 podStartE2EDuration="8.779152357s" podCreationTimestamp="2025-10-07 19:20:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:14.777226783 +0000 UTC m=+140.855482394" watchObservedRunningTime="2025-10-07 19:20:14.779152357 +0000 UTC m=+140.857407968" Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.789450 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.789783 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.289767677 +0000 UTC m=+141.368023288 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.892028 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.892542 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.392521996 +0000 UTC m=+141.470777607 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:14 crc kubenswrapper[4813]: I1007 19:20:14.993532 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:14 crc kubenswrapper[4813]: E1007 19:20:14.993842 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.493831024 +0000 UTC m=+141.572086635 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.005430 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" event={"ID":"ee7901fe-8ea0-4eea-8da1-689790a1dc16","Type":"ContainerStarted","Data":"4ba7898ca2236325988998f3ea4dedf6089a7d422d989ecc2e8d26ab373ec8d7"} Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.005470 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" event={"ID":"ee7901fe-8ea0-4eea-8da1-689790a1dc16","Type":"ContainerStarted","Data":"71671318937e7eb8b685133b13af1fda341d7802eacd5783dce335903d24467f"} Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.008574 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" event={"ID":"97e20cdc-f876-4512-b34e-d6aba0790163","Type":"ContainerStarted","Data":"e8ee20b8a4a740eede5035c974a9106cc22d3c0f265049f2d5148921a14af952"} Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.008722 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.010991 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" event={"ID":"67482f17-74c7-49ee-87e2-19f400d5bc22","Type":"ContainerStarted","Data":"2bcf802786223c0d67e25d34241a3f40a3f1eaf2aeec2de6006ff976f40777f3"} Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011813 4813 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dwjg2 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 
10.217.0.5:8443: connect: connection refused" start-of-body= Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011837 4813 patch_prober.go:28] interesting pod/console-operator-58897d9998-bl9th container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011853 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" podUID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011881 4813 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-d7w7r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" start-of-body= Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011882 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-bl9th" podUID="92a1d2ee-54f8-4317-9d9e-c05517cb3020" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011915 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" podUID="e0c35bd9-fbb2-448c-b1c0-f034529f75c8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.29:5443/healthz\": dial tcp 10.217.0.29:5443: connect: connection refused" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011921 4813 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-sbmzw container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011889 4813 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-g4swz container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.33:6443/healthz\": dial tcp 10.217.0.33:6443: connect: connection refused" start-of-body= Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011951 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" podUID="2e451e2e-d414-42ea-be0b-4035057c65a6" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011836 4813 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hhlh8 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.011981 4813 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" podUID="f4cef620-3f83-48c3-9894-ddef3458cfb5" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.33:6443/healthz\": dial tcp 10.217.0.33:6443: connect: connection refused" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.012000 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" podUID="48819027-dd10-43a8-b2f9-18bbefcc9451" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.012192 4813 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-lq4gb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.012209 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" podUID="958766cf-ba8d-4342-a0c2-d8562d930f2e" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.050513 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" podStartSLOduration=120.050496492 podStartE2EDuration="2m0.050496492s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:15.048917798 +0000 UTC m=+141.127173409" watchObservedRunningTime="2025-10-07 19:20:15.050496492 +0000 UTC m=+141.128752103" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.094245 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.094443 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.594417891 +0000 UTC m=+141.672673502 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.103037 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.103859 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.603847968 +0000 UTC m=+141.682103579 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.123097 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" podStartSLOduration=120.12308043 podStartE2EDuration="2m0.12308043s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:15.079615184 +0000 UTC m=+141.157870795" watchObservedRunningTime="2025-10-07 19:20:15.12308043 +0000 UTC m=+141.201336041" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.123603 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v" podStartSLOduration=121.123597905 podStartE2EDuration="2m1.123597905s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:15.123299426 +0000 UTC m=+141.201555047" watchObservedRunningTime="2025-10-07 19:20:15.123597905 +0000 UTC m=+141.201853516" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.166337 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-fbzgp" podStartSLOduration=120.16630856 podStartE2EDuration="2m0.16630856s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:15.161213296 +0000 UTC m=+141.239468907" watchObservedRunningTime="2025-10-07 19:20:15.16630856 +0000 UTC m=+141.244564171" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 
19:20:15.205719 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.205843 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.705822764 +0000 UTC m=+141.784078375 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.206028 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.206351 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.706343949 +0000 UTC m=+141.784599560 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.307448 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.307795 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.807780061 +0000 UTC m=+141.886035672 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.409023 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.409727 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:15.909708476 +0000 UTC m=+141.987964087 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.510247 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.510586 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.010566292 +0000 UTC m=+142.088821903 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.510685 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.510987 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.010976653 +0000 UTC m=+142.089232264 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.612148 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.612569 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.112551299 +0000 UTC m=+142.190806910 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.693292 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:15 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:15 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:15 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.693375 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.713971 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.714351 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.21433698 +0000 UTC m=+142.292592591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.815030 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.815199 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.315171535 +0000 UTC m=+142.393427146 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.815399 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.823491 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.323472389 +0000 UTC m=+142.401728000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:15 crc kubenswrapper[4813]: I1007 19:20:15.916367 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:15 crc kubenswrapper[4813]: E1007 19:20:15.916709 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.416694439 +0000 UTC m=+142.494950050 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.017665 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.017988 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.517973887 +0000 UTC m=+142.596229498 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.029676 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-klc9x" event={"ID":"6cdbb125-df14-4347-b188-dc29bd210459","Type":"ContainerStarted","Data":"6ffbaffc043d809eb3cc498a814514ad124311e4e7413d96e095c87693fbf908"} Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.030581 4813 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hhlh8 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.030622 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" podUID="48819027-dd10-43a8-b2f9-18bbefcc9451" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.032030 4813 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dwjg2 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.032077 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" podUID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Oct 07 
19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.118676 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.118930 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.618898144 +0000 UTC m=+142.697153755 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.119022 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.121111 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.621098436 +0000 UTC m=+142.699354147 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.220891 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.221113 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.721056366 +0000 UTC m=+142.799311977 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.221162 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.221572 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.72155652 +0000 UTC m=+142.799812171 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.322234 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.322390 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.822365313 +0000 UTC m=+142.900620924 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.322738 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.323139 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.823122995 +0000 UTC m=+142.901378606 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.423498 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.423860 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:16.923841166 +0000 UTC m=+143.002096777 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.525533 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.525903 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.025888205 +0000 UTC m=+143.104143816 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.626870 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.627471 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.127061939 +0000 UTC m=+143.205317570 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.627616 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.627973 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.127963145 +0000 UTC m=+143.206218806 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.670865 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 07 19:20:16 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld
Oct 07 19:20:16 crc kubenswrapper[4813]: [+]process-running ok
Oct 07 19:20:16 crc kubenswrapper[4813]: healthz check failed
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.670928 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.728760 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.728920 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.228896222 +0000 UTC m=+143.307151833 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.728981 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.729376 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.229344595 +0000 UTC m=+143.307600206 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.829888 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.830060 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.330027175 +0000 UTC m=+143.408282786 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.830487 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.830790 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.330783257 +0000 UTC m=+143.409038868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.931148 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.931387 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.431354064 +0000 UTC m=+143.509609685 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:16 crc kubenswrapper[4813]: I1007 19:20:16.931663 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:16 crc kubenswrapper[4813]: E1007 19:20:16.931936 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.43192477 +0000 UTC m=+143.510180381 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.031237 4813 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-d7w7r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.031338 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" podUID="e0c35bd9-fbb2-448c-b1c0-f034529f75c8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.29:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.032986 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.033243 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.533230628 +0000 UTC m=+143.611486229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.034442 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-klc9x" event={"ID":"6cdbb125-df14-4347-b188-dc29bd210459","Type":"ContainerStarted","Data":"37f0bce5569110bf40fb2b1c860ad06b49763fcb01f14ecbcda05545464c16e8"}
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.134968 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.135360 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.635343589 +0000 UTC m=+143.713599190 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.253823 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.254188 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.754173272 +0000 UTC m=+143.832428883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.279623 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7szgp"]
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.280513 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.300318 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.355030 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-utilities\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.355072 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-catalog-content\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.355115 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.355187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppcvg\" (UniqueName: \"kubernetes.io/projected/b0115c94-6b20-40f6-9507-6997ea307ad4-kube-api-access-ppcvg\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.355362 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.855349506 +0000 UTC m=+143.933605107 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.397172 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gcrjl"]
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.398051 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.400874 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.444547 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7szgp"]
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.458885 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.459066 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.959042781 +0000 UTC m=+144.037298392 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.459140 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdpx2\" (UniqueName: \"kubernetes.io/projected/d21c85fa-7b58-4d49-84b5-caa9769bcaed-kube-api-access-tdpx2\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.459228 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-utilities\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.459269 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-catalog-content\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.459335 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-utilities\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.459357 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.459375 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-catalog-content\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.459395 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppcvg\" (UniqueName: \"kubernetes.io/projected/b0115c94-6b20-40f6-9507-6997ea307ad4-kube-api-access-ppcvg\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.459764 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-catalog-content\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.459893 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:17.959886305 +0000 UTC m=+144.038141916 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.460189 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-utilities\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.465537 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gcrjl"]
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.560644 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.560824 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.060798902 +0000 UTC m=+144.139054513 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.561032 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-utilities\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.561066 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.561106 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-catalog-content\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.561184 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdpx2\" (UniqueName: \"kubernetes.io/projected/d21c85fa-7b58-4d49-84b5-caa9769bcaed-kube-api-access-tdpx2\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.561440 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.06142808 +0000 UTC m=+144.139683691 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.561526 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-utilities\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.561571 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-catalog-content\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.589706 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdpx2\" (UniqueName: \"kubernetes.io/projected/d21c85fa-7b58-4d49-84b5-caa9769bcaed-kube-api-access-tdpx2\") pod \"certified-operators-gcrjl\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") " pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.591530 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-shzbz"]
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.592602 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.612751 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppcvg\" (UniqueName: \"kubernetes.io/projected/b0115c94-6b20-40f6-9507-6997ea307ad4-kube-api-access-ppcvg\") pod \"community-operators-7szgp\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") " pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.622278 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-shzbz"]
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.662958 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.663118 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.163093668 +0000 UTC m=+144.241349279 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.663316 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.663412 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ftlb\" (UniqueName: \"kubernetes.io/projected/7a130b1a-2820-4e77-9a0a-80101a7eed1a-kube-api-access-5ftlb\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.663457 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-catalog-content\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.663484 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-utilities\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.663796 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.163786307 +0000 UTC m=+144.242041988 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.666581 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 07 19:20:17 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld
Oct 07 19:20:17 crc kubenswrapper[4813]: [+]process-running ok
Oct 07 19:20:17 crc kubenswrapper[4813]: healthz check failed
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.666618 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.711605 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.766886 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.767065 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-catalog-content\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.767090 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-utilities\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.767189 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ftlb\" (UniqueName: \"kubernetes.io/projected/7a130b1a-2820-4e77-9a0a-80101a7eed1a-kube-api-access-5ftlb\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.767535 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.267519824 +0000 UTC m=+144.345775435 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.768110 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-catalog-content\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.768310 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-utilities\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.797373 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ftlb\" (UniqueName: \"kubernetes.io/projected/7a130b1a-2820-4e77-9a0a-80101a7eed1a-kube-api-access-5ftlb\") pod \"community-operators-shzbz\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.818171 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r6r8l"]
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.819117 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.846566 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r6r8l"]
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.868997 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.869374 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq2d6\" (UniqueName: \"kubernetes.io/projected/391edf07-0597-4236-80a3-1a572239f351-kube-api-access-kq2d6\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.869411 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-utilities\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.869484 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-catalog-content\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.869638 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.369622114 +0000 UTC m=+144.447877725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.898567 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.943446 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shzbz"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.970172 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.970392 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.470294164 +0000 UTC m=+144.548549775 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.970588 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.970635 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq2d6\" (UniqueName: \"kubernetes.io/projected/391edf07-0597-4236-80a3-1a572239f351-kube-api-access-kq2d6\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.970682 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-utilities\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.970808 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-catalog-content\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:17 crc kubenswrapper[4813]: E1007 19:20:17.970893 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.470881231 +0000 UTC m=+144.549136842 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.971366 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-utilities\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:17 crc kubenswrapper[4813]: I1007 19:20:17.971636 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-catalog-content\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.036727 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq2d6\" (UniqueName: \"kubernetes.io/projected/391edf07-0597-4236-80a3-1a572239f351-kube-api-access-kq2d6\") pod \"certified-operators-r6r8l\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.058086 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-klc9x" event={"ID":"6cdbb125-df14-4347-b188-dc29bd210459","Type":"ContainerStarted","Data":"78ef69a692953a3a6daadcad7d5826283af4dd617e924cc59e5740e7eee2444e"}
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.075804 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.076113 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.576097929 +0000 UTC m=+144.654353530 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.138887 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6r8l"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.177892 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.178281 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.678270132 +0000 UTC m=+144.756525733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.279788 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.280129 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.780114125 +0000 UTC m=+144.858369736 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.382076 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.382629 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.882614047 +0000 UTC m=+144.960869658 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.482998 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.483467 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:18.983452532 +0000 UTC m=+145.061708133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.584058 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.584336 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.084309887 +0000 UTC m=+145.162565498 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.671839 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 07 19:20:18 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld
Oct 07 19:20:18 crc kubenswrapper[4813]: [+]process-running ok
Oct 07 19:20:18 crc kubenswrapper[4813]: healthz check failed
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.671883 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.690832 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.691261 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.191240604 +0000 UTC m=+145.269496215 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.792137 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.792465 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.292452919 +0000 UTC m=+145.370708530 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.838212 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-shzbz"]
Oct 07 19:20:18 crc kubenswrapper[4813]: W1007 19:20:18.877496 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a130b1a_2820_4e77_9a0a_80101a7eed1a.slice/crio-a1bf09d9a7052985e30eb092a7f2e24908d8baa46e8483eba59c432fe79846f3 WatchSource:0}: Error finding container a1bf09d9a7052985e30eb092a7f2e24908d8baa46e8483eba59c432fe79846f3: Status 404 returned error can't find the container with id a1bf09d9a7052985e30eb092a7f2e24908d8baa46e8483eba59c432fe79846f3
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.888688 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.888735 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-ddzzh"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.890080 4813 patch_prober.go:28] interesting pod/console-f9d7485db-ddzzh container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body=
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.890121 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-ddzzh" podUID="ba658322-d68e-4312-8283-4da69865e460" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.893415 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:18 crc kubenswrapper[4813]: E1007 19:20:18.893753 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.393740287 +0000 UTC m=+145.471995898 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.922069 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8st2v"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.946814 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body=
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.946881 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.946922 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body=
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.946963 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.949771 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.950293 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.958075 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7szgp"]
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.961007 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Oct 07 19:20:18 crc kubenswrapper[4813]: I1007 19:20:18.961318 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.008088 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a10468a-612b-4d62-af42-0a94f36c1997-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1a10468a-612b-4d62-af42-0a94f36c1997\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.008223 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1a10468a-612b-4d62-af42-0a94f36c1997-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1a10468a-612b-4d62-af42-0a94f36c1997\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.008291 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp"
Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.008599 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.508584897 +0000 UTC m=+145.586840508 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.013213 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.094999 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shzbz" event={"ID":"7a130b1a-2820-4e77-9a0a-80101a7eed1a","Type":"ContainerStarted","Data":"a1bf09d9a7052985e30eb092a7f2e24908d8baa46e8483eba59c432fe79846f3"}
Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.109801 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.110043 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1a10468a-612b-4d62-af42-0a94f36c1997-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1a10468a-612b-4d62-af42-0a94f36c1997\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.110128 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a10468a-612b-4d62-af42-0a94f36c1997-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1a10468a-612b-4d62-af42-0a94f36c1997\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.110489 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.610475341 +0000 UTC m=+145.688730942 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.110504 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1a10468a-612b-4d62-af42-0a94f36c1997-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1a10468a-612b-4d62-af42-0a94f36c1997\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.118752 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.119736 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7szgp" event={"ID":"b0115c94-6b20-40f6-9507-6997ea307ad4","Type":"ContainerStarted","Data":"da0d609fb6851416cee7b4d0f0a85d19f967f453261c5fb0ab28039fcf82fbbb"} Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.120084 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.120099 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.134077 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a10468a-612b-4d62-af42-0a94f36c1997-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1a10468a-612b-4d62-af42-0a94f36c1997\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.143555 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.210051 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9zbhw"] Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.213495 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.215096 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.715083902 +0000 UTC m=+145.793339513 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.226891 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zbhw"] Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.226925 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.226938 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.226964 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-bl9th" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.227192 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.228696 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gcrjl"] Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.242018 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.293708 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.296932 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-sbmzw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.335263 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.335596 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-catalog-content\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.335812 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-utilities\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.335927 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrcc7\" (UniqueName: \"kubernetes.io/projected/94ef8271-5185-462a-97b7-f33732ca1af4-kube-api-access-lrcc7\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.336425 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.836411565 +0000 UTC m=+145.914667176 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.395687 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-bfr95" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.398430 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r6r8l"] Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.415624 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.437392 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.437499 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-utilities\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.437540 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrcc7\" (UniqueName: \"kubernetes.io/projected/94ef8271-5185-462a-97b7-f33732ca1af4-kube-api-access-lrcc7\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.437558 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-catalog-content\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.438238 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-catalog-content\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.438623 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:19.938611498 +0000 UTC m=+146.016867109 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.439863 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-utilities\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.477166 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrcc7\" (UniqueName: \"kubernetes.io/projected/94ef8271-5185-462a-97b7-f33732ca1af4-kube-api-access-lrcc7\") pod \"redhat-marketplace-9zbhw\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") " pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.538047 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.539569 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.039546446 +0000 UTC m=+146.117802057 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.550628 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.585854 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-76dql"] Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.586789 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.606415 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-76dql"] Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.631470 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-d7w7r" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.638810 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.639726 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-utilities\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.639774 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgjgs\" (UniqueName: \"kubernetes.io/projected/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-kube-api-access-dgjgs\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.639927 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-catalog-content\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.639965 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.640201 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.140189275 +0000 UTC m=+146.218444876 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.679738 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.693086 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:19 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:19 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:19 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.693426 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.742641 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.742836 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgjgs\" (UniqueName: \"kubernetes.io/projected/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-kube-api-access-dgjgs\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.742982 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-catalog-content\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.743058 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-utilities\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.743460 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-utilities\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.743528 4813 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.24351444 +0000 UTC m=+146.321770051 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.745196 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-catalog-content\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.781095 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgjgs\" (UniqueName: \"kubernetes.io/projected/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-kube-api-access-dgjgs\") pod \"redhat-marketplace-76dql\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.807252 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.844382 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.847031 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.347016899 +0000 UTC m=+146.425272510 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.925130 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:20:19 crc kubenswrapper[4813]: I1007 19:20:19.946868 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:19 crc kubenswrapper[4813]: E1007 19:20:19.947540 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.447523505 +0000 UTC m=+146.525779116 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.031748 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.048229 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.048559 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.548546525 +0000 UTC m=+146.626802136 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: W1007 19:20:20.058495 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1a10468a_612b_4d62_af42_0a94f36c1997.slice/crio-5305280409b7054416056850868e6f93fe24eea5589ca31810ce506f11657f8b WatchSource:0}: Error finding container 5305280409b7054416056850868e6f93fe24eea5589ca31810ce506f11657f8b: Status 404 returned error can't find the container with id 5305280409b7054416056850868e6f93fe24eea5589ca31810ce506f11657f8b Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.127297 4813 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.132250 4813 generic.go:334] "Generic (PLEG): container finished" podID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerID="f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30" exitCode=0 Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.132918 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7szgp" event={"ID":"b0115c94-6b20-40f6-9507-6997ea307ad4","Type":"ContainerDied","Data":"f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30"} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.146229 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1a10468a-612b-4d62-af42-0a94f36c1997","Type":"ContainerStarted","Data":"5305280409b7054416056850868e6f93fe24eea5589ca31810ce506f11657f8b"} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.146743 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zbhw"] Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.146851 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.150448 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.150559 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.650536572 +0000 UTC m=+146.728792183 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.150811 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.151269 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.651256523 +0000 UTC m=+146.729512134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.153027 4813 generic.go:334] "Generic (PLEG): container finished" podID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerID="84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32" exitCode=0 Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.153073 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shzbz" event={"ID":"7a130b1a-2820-4e77-9a0a-80101a7eed1a","Type":"ContainerDied","Data":"84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32"} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.222174 4813 generic.go:334] "Generic (PLEG): container finished" podID="391edf07-0597-4236-80a3-1a572239f351" containerID="1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e" exitCode=0 Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.223649 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6r8l" event={"ID":"391edf07-0597-4236-80a3-1a572239f351","Type":"ContainerDied","Data":"1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e"} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.223671 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6r8l" event={"ID":"391edf07-0597-4236-80a3-1a572239f351","Type":"ContainerStarted","Data":"4e1bd5f7c10b767a1bd6e9ccc4aef1c19e8060520a5920647e0ac18f3aa9c67f"} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.234690 4813 generic.go:334] "Generic (PLEG): container finished" podID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerID="896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329" exitCode=0 Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.234828 4813 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-gcrjl" event={"ID":"d21c85fa-7b58-4d49-84b5-caa9769bcaed","Type":"ContainerDied","Data":"896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329"} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.234910 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcrjl" event={"ID":"d21c85fa-7b58-4d49-84b5-caa9769bcaed","Type":"ContainerStarted","Data":"508d71b4b230d206a7970ba663030522b906535d90cbfa23e943470ff3ab2f2b"} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.253218 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.254688 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.75466971 +0000 UTC m=+146.832925321 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.255352 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.755343909 +0000 UTC m=+146.833599520 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.255101 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.322909 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-klc9x" event={"ID":"6cdbb125-df14-4347-b188-dc29bd210459","Type":"ContainerStarted","Data":"00fba7859ece836bff9a2a1afb73eeeadfb50ea083bccc9eb222ea57a8ced015"} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.339919 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2gvws" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.364827 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.366250 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.866233927 +0000 UTC m=+146.944489538 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.411392 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-klc9x" podStartSLOduration=14.411373861 podStartE2EDuration="14.411373861s" podCreationTimestamp="2025-10-07 19:20:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:20.363663575 +0000 UTC m=+146.441919186" watchObservedRunningTime="2025-10-07 19:20:20.411373861 +0000 UTC m=+146.489629472" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.411772 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cgcn7"] Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.419160 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.423768 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.430526 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cgcn7"] Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.471923 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.471996 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5srw\" (UniqueName: \"kubernetes.io/projected/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-kube-api-access-c5srw\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.472023 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-catalog-content\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.472059 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-utilities\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.472644 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:20.972626729 +0000 UTC m=+147.050882330 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.573776 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.574185 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5srw\" (UniqueName: \"kubernetes.io/projected/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-kube-api-access-c5srw\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.574269 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-catalog-content\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.574358 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-utilities\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.574791 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-utilities\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.574929 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-07 19:20:21.074914165 +0000 UTC m=+147.153169776 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.575452 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-catalog-content\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.613094 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-76dql"] Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.627337 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5srw\" (UniqueName: \"kubernetes.io/projected/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-kube-api-access-c5srw\") pod \"redhat-operators-cgcn7\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") " pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.679450 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:20 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:20 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:20 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.679516 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.680086 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:20 crc kubenswrapper[4813]: E1007 19:20:20.680487 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-07 19:20:21.180472663 +0000 UTC m=+147.258728274 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pxbqp" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.751905 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.769258 4813 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-07T19:20:20.127317747Z","Handler":null,"Name":""} Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.771795 4813 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.771820 4813 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.781204 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.833496 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-78vxl"] Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.834449 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.883630 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spg5j\" (UniqueName: \"kubernetes.io/projected/bd6bd646-c9f5-493f-8301-817d018a8f00-kube-api-access-spg5j\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.883678 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-utilities\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.883886 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-catalog-content\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.892502 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.913861 4813 patch_prober.go:28] interesting pod/apiserver-76f77b778f-rh8d8 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]log ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]etcd ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/generic-apiserver-start-informers ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/max-in-flight-filter ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/image.openshift.io-apiserver-caches ok Oct 07 19:20:20 crc kubenswrapper[4813]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 07 19:20:20 crc kubenswrapper[4813]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/project.openshift.io-projectcache ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 07 19:20:20 crc kubenswrapper[4813]: [-]poststarthook/openshift.io-startinformers failed: reason withheld Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 07 19:20:20 crc kubenswrapper[4813]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 07 19:20:20 crc kubenswrapper[4813]: livez check failed Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 
19:20:20.913913 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" podUID="ee7901fe-8ea0-4eea-8da1-689790a1dc16" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.920286 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-78vxl"] Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.984751 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-catalog-content\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.984801 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spg5j\" (UniqueName: \"kubernetes.io/projected/bd6bd646-c9f5-493f-8301-817d018a8f00-kube-api-access-spg5j\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.984816 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-utilities\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.984855 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.985498 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-catalog-content\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:20 crc kubenswrapper[4813]: I1007 19:20:20.985934 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-utilities\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.009831 4813 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
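The MountDevice failure at the top of this window ("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers") and the registration logged a fraction of a second later (csi_plugin.go:100/113) are the usual startup race: the volume reconciler retries until the driver's socket under /var/lib/kubelet/plugins_registry has been processed, after which the retry succeeds, as it does immediately below. Registration can be confirmed from the API side through the node's CSINode object; a minimal client-go sketch, assuming the node name "crc" from this log and a local kubeconfig path:

package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig location; adjust for your environment.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	// CSINode lists the drivers whose registration the kubelet has accepted;
	// an empty list corresponds to the "not found in the list of registered
	// CSI drivers" error above.
	csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		log.Fatal(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		fmt.Println("registered CSI driver:", d.Name)
	}
}

Once csi_plugin.go:113 has run, kubevirt.io.hostpath-provisioner should appear in that list.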
Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.009870 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.037068 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spg5j\" (UniqueName: \"kubernetes.io/projected/bd6bd646-c9f5-493f-8301-817d018a8f00-kube-api-access-spg5j\") pod \"redhat-operators-78vxl\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.150005 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.237527 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pxbqp\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.324609 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cgcn7"] Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.360291 4813 generic.go:334] "Generic (PLEG): container finished" podID="94ef8271-5185-462a-97b7-f33732ca1af4" containerID="df11a89c5999d55191eb6a166f7f38bbe24d00cd2961b79f9a07fa69661c7046" exitCode=0 Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.360383 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zbhw" event={"ID":"94ef8271-5185-462a-97b7-f33732ca1af4","Type":"ContainerDied","Data":"df11a89c5999d55191eb6a166f7f38bbe24d00cd2961b79f9a07fa69661c7046"} Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.360407 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zbhw" event={"ID":"94ef8271-5185-462a-97b7-f33732ca1af4","Type":"ContainerStarted","Data":"a47d0d8a805306feee0c2cec5a4c310963349e4be2a297cd75930a78ae90bd05"} Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.365395 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1a10468a-612b-4d62-af42-0a94f36c1997","Type":"ContainerStarted","Data":"89d3ee033d5674eba98eb20cc0438a7d99e84f7b665bd1761a5470c5cd985140"} Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.368050 4813 generic.go:334] "Generic (PLEG): container finished" podID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerID="c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7" exitCode=0 Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.368822 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76dql" 
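The "SyncLoop (PLEG): event for pod" records above come from the Pod Lifecycle Event Generator, which relays container state changes observed in CRI-O back into the kubelet sync loop; "Generic (PLEG): container finished ... exitCode=0" is a catalog pod's extract container completing normally. The same transitions surface as pod status updates, so they can be followed off-node too; a sketch, assuming in-cluster credentials and the openshift-marketplace namespace seen here:

package main

import (
	"context"
	"fmt"
	"log"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes the sketch runs inside the cluster
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	w, err := cs.CoreV1().Pods("openshift-marketplace").Watch(context.TODO(), metav1.ListOptions{})
	if err != nil {
		log.Fatal(err)
	}
	// Each watch event mirrors a PLEG-driven status sync from the kubelet.
	for ev := range w.ResultChan() {
		pod, ok := ev.Object.(*corev1.Pod)
		if !ok {
			continue
		}
		for _, st := range append(pod.Status.InitContainerStatuses, pod.Status.ContainerStatuses...) {
			if t := st.State.Terminated; t != nil {
				fmt.Printf("%s/%s exited with code %d\n", pod.Name, st.Name, t.ExitCode)
			}
		}
	}
}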
event={"ID":"0caa8b2c-1ed6-4162-856a-1d08c578cdd8","Type":"ContainerDied","Data":"c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7"} Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.368849 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76dql" event={"ID":"0caa8b2c-1ed6-4162-856a-1d08c578cdd8","Type":"ContainerStarted","Data":"1791fdd6535093e8fc031ab204d39a0e473a67795f096d051eaed44871f4d70b"} Oct 07 19:20:21 crc kubenswrapper[4813]: W1007 19:20:21.369825 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d43fc22_f0c1_46f7_bf20_1245eac2b00f.slice/crio-d4dc8ce2a5a7be19f6127ca088d7a9ff4d73b343fd408a56d42428e0c015e106 WatchSource:0}: Error finding container d4dc8ce2a5a7be19f6127ca088d7a9ff4d73b343fd408a56d42428e0c015e106: Status 404 returned error can't find the container with id d4dc8ce2a5a7be19f6127ca088d7a9ff4d73b343fd408a56d42428e0c015e106 Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.478207 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.669889 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:21 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:21 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:21 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.670173 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.678666 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.678646483 podStartE2EDuration="3.678646483s" podCreationTimestamp="2025-10-07 19:20:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:21.469678998 +0000 UTC m=+147.547934609" watchObservedRunningTime="2025-10-07 19:20:21.678646483 +0000 UTC m=+147.756902094" Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.679069 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-78vxl"] Oct 07 19:20:21 crc kubenswrapper[4813]: I1007 19:20:21.835975 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pxbqp"] Oct 07 19:20:21 crc kubenswrapper[4813]: W1007 19:20:21.858412 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbea39b1d_02dc_43ee_939b_1849fbd3bedd.slice/crio-8a5168fcd70c994da3785a7e12791ebd0ae046b5d22b6f7ce7890d11d9825bbc WatchSource:0}: Error finding container 8a5168fcd70c994da3785a7e12791ebd0ae046b5d22b6f7ce7890d11d9825bbc: Status 404 returned error can't find the container with id 8a5168fcd70c994da3785a7e12791ebd0ae046b5d22b6f7ce7890d11d9825bbc Oct 07 19:20:22 crc 
kubenswrapper[4813]: I1007 19:20:22.034457 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.035056 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.037760 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.039047 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.047002 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.080385 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.080667 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.104241 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.104442 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.205699 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.205790 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.205879 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\") " 
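The machine-config-daemon liveness failure a few entries above ("dial tcp 127.0.0.1:8798: connect: connection refused") is a plain HTTP GET probe whose target is not listening yet. Its shape in a pod spec is roughly the following, where the path /health and port 8798 are taken from the log and the threshold values are assumptions; after failureThreshold consecutive liveness failures the kubelet kills and restarts the container, exactly the sequence logged later for downloads-7954f5f757-5t4w8:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// A liveness probe matching the failing GET http://127.0.0.1:8798/health above.
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{Path: "/health", Port: intstr.FromInt(8798)},
		},
		PeriodSeconds:    30, // assumed; the log shows ~30s between attempts
		FailureThreshold: 3,  // assumed; the kubelet restarts the container after 3 misses
	}
	fmt.Printf("%+v\n", probe)
}

The same probe fails again at 19:20:52, thirty seconds later, which is what the assumed periodSeconds reflects.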
pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.242241 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.347591 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.374240 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-78vxl" event={"ID":"bd6bd646-c9f5-493f-8301-817d018a8f00","Type":"ContainerStarted","Data":"846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143"} Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.374288 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-78vxl" event={"ID":"bd6bd646-c9f5-493f-8301-817d018a8f00","Type":"ContainerStarted","Data":"63958825e0a92697f58ad48893cab084c7eb9455457163fc46499a3ce5f3bdbf"} Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.378279 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgcn7" event={"ID":"4d43fc22-f0c1-46f7-bf20-1245eac2b00f","Type":"ContainerStarted","Data":"baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188"} Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.378311 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgcn7" event={"ID":"4d43fc22-f0c1-46f7-bf20-1245eac2b00f","Type":"ContainerStarted","Data":"d4dc8ce2a5a7be19f6127ca088d7a9ff4d73b343fd408a56d42428e0c015e106"} Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.380284 4813 generic.go:334] "Generic (PLEG): container finished" podID="1a10468a-612b-4d62-af42-0a94f36c1997" containerID="89d3ee033d5674eba98eb20cc0438a7d99e84f7b665bd1761a5470c5cd985140" exitCode=0 Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.380970 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1a10468a-612b-4d62-af42-0a94f36c1997","Type":"ContainerDied","Data":"89d3ee033d5674eba98eb20cc0438a7d99e84f7b665bd1761a5470c5cd985140"} Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.383834 4813 generic.go:334] "Generic (PLEG): container finished" podID="99a37f2e-fef1-47f1-ac60-6504a968ebf8" containerID="2eab444bdfada98cb65c27936ef25746a95b220af4227fde68f542eb8b06e3ca" exitCode=0 Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.383890 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" event={"ID":"99a37f2e-fef1-47f1-ac60-6504a968ebf8","Type":"ContainerDied","Data":"2eab444bdfada98cb65c27936ef25746a95b220af4227fde68f542eb8b06e3ca"} Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.385539 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" event={"ID":"bea39b1d-02dc-43ee-939b-1849fbd3bedd","Type":"ContainerStarted","Data":"8a5168fcd70c994da3785a7e12791ebd0ae046b5d22b6f7ce7890d11d9825bbc"} Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.409099 4813 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.409187 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.410411 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.413828 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.477169 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.510906 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.510953 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.514969 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.517171 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 
07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.610128 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.668190 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:22 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:22 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:22 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.668239 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:22 crc kubenswrapper[4813]: W1007 19:20:22.697159 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-4b2480eac3aa10533032cf946ef96f2bd5868ecbc485be46b548f8c53805f2b5 WatchSource:0}: Error finding container 4b2480eac3aa10533032cf946ef96f2bd5868ecbc485be46b548f8c53805f2b5: Status 404 returned error can't find the container with id 4b2480eac3aa10533032cf946ef96f2bd5868ecbc485be46b548f8c53805f2b5 Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.766199 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 07 19:20:22 crc kubenswrapper[4813]: W1007 19:20:22.785609 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podd74fb651_4e63_4f47_9183_0a6fa0a9724f.slice/crio-dcf7eaabff409c548cb50608c8345de566373d2c2ef19dd2e72aeeb0560d5c05 WatchSource:0}: Error finding container dcf7eaabff409c548cb50608c8345de566373d2c2ef19dd2e72aeeb0560d5c05: Status 404 returned error can't find the container with id dcf7eaabff409c548cb50608c8345de566373d2c2ef19dd2e72aeeb0560d5c05 Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.786598 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 07 19:20:22 crc kubenswrapper[4813]: I1007 19:20:22.800052 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:20:23 crc kubenswrapper[4813]: W1007 19:20:23.038386 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-554ddb280394e0a73933f921869d0a1dd54957d2894e0053c5cc8b085cec4b98 WatchSource:0}: Error finding container 554ddb280394e0a73933f921869d0a1dd54957d2894e0053c5cc8b085cec4b98: Status 404 returned error can't find the container with id 554ddb280394e0a73933f921869d0a1dd54957d2894e0053c5cc8b085cec4b98 Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.393940 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"554ddb280394e0a73933f921869d0a1dd54957d2894e0053c5cc8b085cec4b98"} Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.395693 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" event={"ID":"bea39b1d-02dc-43ee-939b-1849fbd3bedd","Type":"ContainerStarted","Data":"11e3b51615df3e92c1deb763d73f7f8747013526263ae6a525dfb835b0d88d0e"} Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.395811 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.397456 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c9802913cf924868b640881efc504c493e7c3974a2fd031fa0b319c378fcef7a"} Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.400944 4813 generic.go:334] "Generic (PLEG): container finished" podID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerID="846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143" exitCode=0 Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.400999 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-78vxl" event={"ID":"bd6bd646-c9f5-493f-8301-817d018a8f00","Type":"ContainerDied","Data":"846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143"} Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.403562 4813 generic.go:334] "Generic (PLEG): container finished" podID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerID="baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188" exitCode=0 Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.403603 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgcn7" event={"ID":"4d43fc22-f0c1-46f7-bf20-1245eac2b00f","Type":"ContainerDied","Data":"baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188"} Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.405678 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d74fb651-4e63-4f47-9183-0a6fa0a9724f","Type":"ContainerStarted","Data":"dcf7eaabff409c548cb50608c8345de566373d2c2ef19dd2e72aeeb0560d5c05"} Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.407647 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6aa31baacf649dc77a43dcabc01ccd8eaabc9d46503b3a3a7c91ca1ba101b450"} Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.407676 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4b2480eac3aa10533032cf946ef96f2bd5868ecbc485be46b548f8c53805f2b5"} Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.415781 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" podStartSLOduration=128.41576561 podStartE2EDuration="2m8.41576561s" podCreationTimestamp="2025-10-07 19:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:20:23.408444403 +0000 UTC m=+149.486700014" watchObservedRunningTime="2025-10-07 19:20:23.41576561 +0000 UTC m=+149.494021221" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.672838 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:23 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:23 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:23 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.673073 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.689893 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.710817 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.736578 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a10468a-612b-4d62-af42-0a94f36c1997-kube-api-access\") pod \"1a10468a-612b-4d62-af42-0a94f36c1997\" (UID: \"1a10468a-612b-4d62-af42-0a94f36c1997\") " Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.736623 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1a10468a-612b-4d62-af42-0a94f36c1997-kubelet-dir\") pod \"1a10468a-612b-4d62-af42-0a94f36c1997\" (UID: \"1a10468a-612b-4d62-af42-0a94f36c1997\") " Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.736944 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a10468a-612b-4d62-af42-0a94f36c1997-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1a10468a-612b-4d62-af42-0a94f36c1997" (UID: "1a10468a-612b-4d62-af42-0a94f36c1997"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.743108 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a10468a-612b-4d62-af42-0a94f36c1997-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1a10468a-612b-4d62-af42-0a94f36c1997" (UID: "1a10468a-612b-4d62-af42-0a94f36c1997"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.837345 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99a37f2e-fef1-47f1-ac60-6504a968ebf8-config-volume\") pod \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.837428 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99a37f2e-fef1-47f1-ac60-6504a968ebf8-secret-volume\") pod \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.837476 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cj9k9\" (UniqueName: \"kubernetes.io/projected/99a37f2e-fef1-47f1-ac60-6504a968ebf8-kube-api-access-cj9k9\") pod \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\" (UID: \"99a37f2e-fef1-47f1-ac60-6504a968ebf8\") " Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.837830 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a10468a-612b-4d62-af42-0a94f36c1997-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.837854 4813 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1a10468a-612b-4d62-af42-0a94f36c1997-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.838882 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99a37f2e-fef1-47f1-ac60-6504a968ebf8-config-volume" (OuterVolumeSpecName: "config-volume") pod "99a37f2e-fef1-47f1-ac60-6504a968ebf8" (UID: "99a37f2e-fef1-47f1-ac60-6504a968ebf8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.842511 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99a37f2e-fef1-47f1-ac60-6504a968ebf8-kube-api-access-cj9k9" (OuterVolumeSpecName: "kube-api-access-cj9k9") pod "99a37f2e-fef1-47f1-ac60-6504a968ebf8" (UID: "99a37f2e-fef1-47f1-ac60-6504a968ebf8"). InnerVolumeSpecName "kube-api-access-cj9k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.842581 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99a37f2e-fef1-47f1-ac60-6504a968ebf8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "99a37f2e-fef1-47f1-ac60-6504a968ebf8" (UID: "99a37f2e-fef1-47f1-ac60-6504a968ebf8"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.939221 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99a37f2e-fef1-47f1-ac60-6504a968ebf8-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.939256 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99a37f2e-fef1-47f1-ac60-6504a968ebf8-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 19:20:23 crc kubenswrapper[4813]: I1007 19:20:23.939265 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cj9k9\" (UniqueName: \"kubernetes.io/projected/99a37f2e-fef1-47f1-ac60-6504a968ebf8-kube-api-access-cj9k9\") on node \"crc\" DevicePath \"\"" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.217939 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.222192 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-rh8d8" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.420731 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"895aa683eba7a70bdbede3af3d28d5309f643bf5bcc769ec6fed9b67f64e9292"} Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.442658 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" event={"ID":"99a37f2e-fef1-47f1-ac60-6504a968ebf8","Type":"ContainerDied","Data":"6c62529ec0700671fcf27182e8ffd9b6c461b0160a1b80766665373d6504602f"} Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.442698 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c62529ec0700671fcf27182e8ffd9b6c461b0160a1b80766665373d6504602f" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.442774 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.447831 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"2520f30589650700a9c2d7271d841c63b41194b02b450eeb0e015ac7cfc28948"} Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.447906 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.461685 4813 generic.go:334] "Generic (PLEG): container finished" podID="d74fb651-4e63-4f47-9183-0a6fa0a9724f" containerID="2175dfbf97fcdc18b6ac9198f66d3defd89c0813d605dd0872734a5a189339e0" exitCode=0 Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.461795 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d74fb651-4e63-4f47-9183-0a6fa0a9724f","Type":"ContainerDied","Data":"2175dfbf97fcdc18b6ac9198f66d3defd89c0813d605dd0872734a5a189339e0"} Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.464938 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-rp89d" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.468822 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.470182 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1a10468a-612b-4d62-af42-0a94f36c1997","Type":"ContainerDied","Data":"5305280409b7054416056850868e6f93fe24eea5589ca31810ce506f11657f8b"} Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.470219 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5305280409b7054416056850868e6f93fe24eea5589ca31810ce506f11657f8b" Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.674544 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:24 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:24 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:24 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:24 crc kubenswrapper[4813]: I1007 19:20:24.674619 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.666373 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:25 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:25 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:25 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.666429 4813 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.749016 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.891244 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kubelet-dir\") pod \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\" (UID: \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\") " Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.891386 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kube-api-access\") pod \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\" (UID: \"d74fb651-4e63-4f47-9183-0a6fa0a9724f\") " Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.899868 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d74fb651-4e63-4f47-9183-0a6fa0a9724f" (UID: "d74fb651-4e63-4f47-9183-0a6fa0a9724f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.918032 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d74fb651-4e63-4f47-9183-0a6fa0a9724f" (UID: "d74fb651-4e63-4f47-9183-0a6fa0a9724f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.996474 4813 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 07 19:20:25 crc kubenswrapper[4813]: I1007 19:20:25.996504 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d74fb651-4e63-4f47-9183-0a6fa0a9724f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 07 19:20:26 crc kubenswrapper[4813]: I1007 19:20:26.511808 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"d74fb651-4e63-4f47-9183-0a6fa0a9724f","Type":"ContainerDied","Data":"dcf7eaabff409c548cb50608c8345de566373d2c2ef19dd2e72aeeb0560d5c05"} Oct 07 19:20:26 crc kubenswrapper[4813]: I1007 19:20:26.511862 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcf7eaabff409c548cb50608c8345de566373d2c2ef19dd2e72aeeb0560d5c05" Oct 07 19:20:26 crc kubenswrapper[4813]: I1007 19:20:26.511955 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 07 19:20:26 crc kubenswrapper[4813]: I1007 19:20:26.668759 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:26 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:26 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:26 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:26 crc kubenswrapper[4813]: I1007 19:20:26.668838 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:27 crc kubenswrapper[4813]: I1007 19:20:27.667246 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:27 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:27 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:27 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:27 crc kubenswrapper[4813]: I1007 19:20:27.667624 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:28 crc kubenswrapper[4813]: I1007 19:20:28.665728 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:28 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:28 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:28 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:28 crc kubenswrapper[4813]: I1007 19:20:28.665778 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:28 crc kubenswrapper[4813]: I1007 19:20:28.889876 4813 patch_prober.go:28] interesting pod/console-f9d7485db-ddzzh container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Oct 07 19:20:28 crc kubenswrapper[4813]: I1007 19:20:28.889931 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-ddzzh" podUID="ba658322-d68e-4312-8283-4da69865e460" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Oct 07 19:20:28 crc kubenswrapper[4813]: I1007 19:20:28.953887 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: 
connection refused" start-of-body= Oct 07 19:20:28 crc kubenswrapper[4813]: I1007 19:20:28.953939 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:20:28 crc kubenswrapper[4813]: I1007 19:20:28.954230 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:20:28 crc kubenswrapper[4813]: I1007 19:20:28.954274 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:20:29 crc kubenswrapper[4813]: I1007 19:20:29.666591 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:29 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:29 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:29 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:29 crc kubenswrapper[4813]: I1007 19:20:29.666643 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:30 crc kubenswrapper[4813]: I1007 19:20:30.666996 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:30 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:30 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:30 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:30 crc kubenswrapper[4813]: I1007 19:20:30.667057 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:31 crc kubenswrapper[4813]: I1007 19:20:31.666580 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:31 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:31 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:31 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:31 crc kubenswrapper[4813]: I1007 19:20:31.666922 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP 
probe failed with statuscode: 500" Oct 07 19:20:32 crc kubenswrapper[4813]: I1007 19:20:32.667346 4813 patch_prober.go:28] interesting pod/router-default-5444994796-nkdg4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 07 19:20:32 crc kubenswrapper[4813]: [-]has-synced failed: reason withheld Oct 07 19:20:32 crc kubenswrapper[4813]: [+]process-running ok Oct 07 19:20:32 crc kubenswrapper[4813]: healthz check failed Oct 07 19:20:32 crc kubenswrapper[4813]: I1007 19:20:32.667413 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nkdg4" podUID="ce72966f-f5d7-4257-983b-d630e5b91b63" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:20:33 crc kubenswrapper[4813]: I1007 19:20:33.666431 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:33 crc kubenswrapper[4813]: I1007 19:20:33.668711 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-nkdg4" Oct 07 19:20:36 crc kubenswrapper[4813]: I1007 19:20:36.678301 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:20:36 crc kubenswrapper[4813]: I1007 19:20:36.704407 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8c05824-c5ea-44b7-bd35-0c7d6561a61b-metrics-certs\") pod \"network-metrics-daemon-nz8v5\" (UID: \"c8c05824-c5ea-44b7-bd35-0c7d6561a61b\") " pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:20:36 crc kubenswrapper[4813]: I1007 19:20:36.826659 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nz8v5" Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.909219 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.913533 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.945866 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.945947 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.946008 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.946804 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"0e7ce4c9acfdd3677cb842e22f61ee8b315699db4dfd47a6900577be1bbf75ea"} pod="openshift-console/downloads-7954f5f757-5t4w8" containerMessage="Container download-server failed liveness probe, will be restarted" Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.946961 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" containerID="cri-o://0e7ce4c9acfdd3677cb842e22f61ee8b315699db4dfd47a6900577be1bbf75ea" gracePeriod=2 Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.947362 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.947584 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.948222 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:20:38 crc kubenswrapper[4813]: I1007 19:20:38.948247 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:20:39 crc kubenswrapper[4813]: I1007 
19:20:39.617035 4813 generic.go:334] "Generic (PLEG): container finished" podID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerID="0e7ce4c9acfdd3677cb842e22f61ee8b315699db4dfd47a6900577be1bbf75ea" exitCode=0 Oct 07 19:20:39 crc kubenswrapper[4813]: I1007 19:20:39.617225 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-5t4w8" event={"ID":"0c06d185-7b7a-448a-8b8b-dcd5a0560a20","Type":"ContainerDied","Data":"0e7ce4c9acfdd3677cb842e22f61ee8b315699db4dfd47a6900577be1bbf75ea"} Oct 07 19:20:41 crc kubenswrapper[4813]: I1007 19:20:41.486559 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:20:48 crc kubenswrapper[4813]: I1007 19:20:48.945296 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:20:48 crc kubenswrapper[4813]: I1007 19:20:48.945891 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:20:49 crc kubenswrapper[4813]: I1007 19:20:49.697274 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tcqtn" Oct 07 19:20:52 crc kubenswrapper[4813]: I1007 19:20:52.081621 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:20:52 crc kubenswrapper[4813]: I1007 19:20:52.081730 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:20:58 crc kubenswrapper[4813]: I1007 19:20:58.945082 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:20:58 crc kubenswrapper[4813]: I1007 19:20:58.945750 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:21:02 crc kubenswrapper[4813]: E1007 19:21:02.362581 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 07 19:21:02 crc kubenswrapper[4813]: E1007 19:21:02.363242 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ppcvg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7szgp_openshift-marketplace(b0115c94-6b20-40f6-9507-6997ea307ad4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:21:02 crc kubenswrapper[4813]: E1007 19:21:02.364452 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-7szgp" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" Oct 07 19:21:02 crc kubenswrapper[4813]: E1007 19:21:02.408415 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 07 19:21:02 crc kubenswrapper[4813]: E1007 19:21:02.408553 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5ftlb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-shzbz_openshift-marketplace(7a130b1a-2820-4e77-9a0a-80101a7eed1a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:21:02 crc kubenswrapper[4813]: E1007 19:21:02.409872 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-shzbz" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" Oct 07 19:21:02 crc kubenswrapper[4813]: I1007 19:21:02.806224 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 07 19:21:04 crc kubenswrapper[4813]: E1007 19:21:04.179529 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-shzbz" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" Oct 07 19:21:04 crc kubenswrapper[4813]: E1007 19:21:04.179558 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7szgp" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" Oct 07 19:21:04 crc kubenswrapper[4813]: E1007 19:21:04.269382 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 07 19:21:04 crc kubenswrapper[4813]: E1007 19:21:04.269774 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tdpx2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gcrjl_openshift-marketplace(d21c85fa-7b58-4d49-84b5-caa9769bcaed): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:21:04 crc kubenswrapper[4813]: E1007 19:21:04.270965 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gcrjl" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" Oct 07 19:21:04 crc kubenswrapper[4813]: E1007 19:21:04.308740 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 07 19:21:04 crc kubenswrapper[4813]: E1007 19:21:04.308879 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kq2d6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-r6r8l_openshift-marketplace(391edf07-0597-4236-80a3-1a572239f351): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:21:04 crc kubenswrapper[4813]: E1007 19:21:04.310440 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-r6r8l" podUID="391edf07-0597-4236-80a3-1a572239f351" Oct 07 19:21:07 crc kubenswrapper[4813]: E1007 19:21:07.155307 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gcrjl" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" Oct 07 19:21:07 crc kubenswrapper[4813]: E1007 19:21:07.155536 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-r6r8l" podUID="391edf07-0597-4236-80a3-1a572239f351" Oct 07 19:21:07 crc kubenswrapper[4813]: E1007 19:21:07.238296 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 07 19:21:07 crc kubenswrapper[4813]: E1007 19:21:07.238563 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c5srw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-cgcn7_openshift-marketplace(4d43fc22-f0c1-46f7-bf20-1245eac2b00f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:21:07 crc kubenswrapper[4813]: E1007 19:21:07.239786 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-cgcn7" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" Oct 07 19:21:07 crc kubenswrapper[4813]: E1007 19:21:07.240669 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 07 19:21:07 crc kubenswrapper[4813]: E1007 19:21:07.240879 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-spg5j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-78vxl_openshift-marketplace(bd6bd646-c9f5-493f-8301-817d018a8f00): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:21:07 crc kubenswrapper[4813]: E1007 19:21:07.242043 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-78vxl" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" Oct 07 19:21:08 crc kubenswrapper[4813]: I1007 19:21:08.945791 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:21:08 crc kubenswrapper[4813]: I1007 19:21:08.946130 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:21:11 crc kubenswrapper[4813]: E1007 19:21:11.039510 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-cgcn7" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" Oct 07 19:21:11 crc kubenswrapper[4813]: E1007 19:21:11.046209 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-78vxl" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" Oct 07 19:21:12 crc kubenswrapper[4813]: E1007 19:21:12.646864 4813 log.go:32] "PullImage from image service failed" 
err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 07 19:21:12 crc kubenswrapper[4813]: E1007 19:21:12.647919 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dgjgs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-76dql_openshift-marketplace(0caa8b2c-1ed6-4162-856a-1d08c578cdd8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:21:12 crc kubenswrapper[4813]: E1007 19:21:12.649564 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-76dql" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" Oct 07 19:21:12 crc kubenswrapper[4813]: E1007 19:21:12.693880 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 07 19:21:12 crc kubenswrapper[4813]: E1007 19:21:12.694040 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lrcc7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-9zbhw_openshift-marketplace(94ef8271-5185-462a-97b7-f33732ca1af4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:21:12 crc kubenswrapper[4813]: E1007 19:21:12.695179 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-9zbhw" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" Oct 07 19:21:12 crc kubenswrapper[4813]: I1007 19:21:12.857481 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-5t4w8" event={"ID":"0c06d185-7b7a-448a-8b8b-dcd5a0560a20","Type":"ContainerStarted","Data":"e85b9547209fb9f324c6296555af65e6f7d0cbc81e07e24273609ff2bc4e03d5"} Oct 07 19:21:12 crc kubenswrapper[4813]: I1007 19:21:12.858253 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:21:12 crc kubenswrapper[4813]: I1007 19:21:12.858642 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:21:12 crc kubenswrapper[4813]: I1007 19:21:12.858698 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:21:12 crc kubenswrapper[4813]: E1007 19:21:12.860761 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-76dql" 
podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" Oct 07 19:21:12 crc kubenswrapper[4813]: E1007 19:21:12.861042 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-9zbhw" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" Oct 07 19:21:12 crc kubenswrapper[4813]: I1007 19:21:12.997686 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nz8v5"] Oct 07 19:21:13 crc kubenswrapper[4813]: W1007 19:21:13.018470 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8c05824_c5ea_44b7_bd35_0c7d6561a61b.slice/crio-3309dead83b1ef05e88f351fda03c188b45c35d9f61af9f65c249c3d33fda7b7 WatchSource:0}: Error finding container 3309dead83b1ef05e88f351fda03c188b45c35d9f61af9f65c249c3d33fda7b7: Status 404 returned error can't find the container with id 3309dead83b1ef05e88f351fda03c188b45c35d9f61af9f65c249c3d33fda7b7 Oct 07 19:21:13 crc kubenswrapper[4813]: I1007 19:21:13.864112 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" event={"ID":"c8c05824-c5ea-44b7-bd35-0c7d6561a61b","Type":"ContainerStarted","Data":"3309dead83b1ef05e88f351fda03c188b45c35d9f61af9f65c249c3d33fda7b7"} Oct 07 19:21:13 crc kubenswrapper[4813]: I1007 19:21:13.865058 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:21:13 crc kubenswrapper[4813]: I1007 19:21:13.865121 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:21:14 crc kubenswrapper[4813]: I1007 19:21:14.871592 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" event={"ID":"c8c05824-c5ea-44b7-bd35-0c7d6561a61b","Type":"ContainerStarted","Data":"4c7a6482c67e9cc17c819f14d85b8ff45b8aa37c2f93987413d4de4733a22114"} Oct 07 19:21:14 crc kubenswrapper[4813]: I1007 19:21:14.871682 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Oct 07 19:21:14 crc kubenswrapper[4813]: I1007 19:21:14.872164 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Oct 07 19:21:15 crc kubenswrapper[4813]: I1007 19:21:15.882764 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" event={"ID":"c8c05824-c5ea-44b7-bd35-0c7d6561a61b","Type":"ContainerStarted","Data":"b84b135ff34b0787210f91ec14bd1ff78877bd9ef282ab50ce52ec1fdd8a1961"} Oct 07 19:21:15 crc kubenswrapper[4813]: I1007 
Oct 07 19:21:12 crc kubenswrapper[4813]: I1007 19:21:12.997686 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nz8v5"]
Oct 07 19:21:13 crc kubenswrapper[4813]: W1007 19:21:13.018470 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8c05824_c5ea_44b7_bd35_0c7d6561a61b.slice/crio-3309dead83b1ef05e88f351fda03c188b45c35d9f61af9f65c249c3d33fda7b7 WatchSource:0}: Error finding container 3309dead83b1ef05e88f351fda03c188b45c35d9f61af9f65c249c3d33fda7b7: Status 404 returned error can't find the container with id 3309dead83b1ef05e88f351fda03c188b45c35d9f61af9f65c249c3d33fda7b7
Oct 07 19:21:13 crc kubenswrapper[4813]: I1007 19:21:13.864112 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" event={"ID":"c8c05824-c5ea-44b7-bd35-0c7d6561a61b","Type":"ContainerStarted","Data":"3309dead83b1ef05e88f351fda03c188b45c35d9f61af9f65c249c3d33fda7b7"}
Oct 07 19:21:13 crc kubenswrapper[4813]: I1007 19:21:13.865058 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body=
Oct 07 19:21:13 crc kubenswrapper[4813]: I1007 19:21:13.865121 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused"
Oct 07 19:21:14 crc kubenswrapper[4813]: I1007 19:21:14.871592 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" event={"ID":"c8c05824-c5ea-44b7-bd35-0c7d6561a61b","Type":"ContainerStarted","Data":"4c7a6482c67e9cc17c819f14d85b8ff45b8aa37c2f93987413d4de4733a22114"}
Oct 07 19:21:14 crc kubenswrapper[4813]: I1007 19:21:14.871682 4813 patch_prober.go:28] interesting pod/downloads-7954f5f757-5t4w8 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body=
Oct 07 19:21:14 crc kubenswrapper[4813]: I1007 19:21:14.872164 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-5t4w8" podUID="0c06d185-7b7a-448a-8b8b-dcd5a0560a20" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused"
Oct 07 19:21:15 crc kubenswrapper[4813]: I1007 19:21:15.882764 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nz8v5" event={"ID":"c8c05824-c5ea-44b7-bd35-0c7d6561a61b","Type":"ContainerStarted","Data":"b84b135ff34b0787210f91ec14bd1ff78877bd9ef282ab50ce52ec1fdd8a1961"}
Oct 07 19:21:15 crc kubenswrapper[4813]: I1007 19:21:15.905382 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-nz8v5" podStartSLOduration=181.905356658 podStartE2EDuration="3m1.905356658s" podCreationTimestamp="2025-10-07 19:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:21:15.903153255 +0000 UTC m=+201.981408906" watchObservedRunningTime="2025-10-07 19:21:15.905356658 +0000 UTC m=+201.983612309"
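The pod_startup_latency_tracker entry above reports podStartSLOduration=181.905356658 for network-metrics-daemon-nz8v5. That value is simply watchObservedRunningTime minus podCreationTimestamp; a worked check in Go using the timestamps copied verbatim from the entry:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the log entry above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-10-07 19:18:14 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-10-07 19:21:15.905356658 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 3m1.905356658s, i.e. podStartSLOduration=181.905356658.
	fmt.Println(observed.Sub(created))
}

Because firstStartedPulling and lastFinishedPulling are the zero time here, no image-pull time is excluded and podStartE2EDuration equals the SLO duration.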
podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d" gracePeriod=600 Oct 07 19:21:23 crc kubenswrapper[4813]: I1007 19:21:23.937618 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d" exitCode=0 Oct 07 19:21:23 crc kubenswrapper[4813]: I1007 19:21:23.937909 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d"} Oct 07 19:21:25 crc kubenswrapper[4813]: I1007 19:21:25.951500 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"0830cd9c6c944f536089b77bf873249e6b2b285e17b46cc92095ca9afc2e0ff7"} Oct 07 19:21:25 crc kubenswrapper[4813]: I1007 19:21:25.954750 4813 generic.go:334] "Generic (PLEG): container finished" podID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerID="9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729" exitCode=0 Oct 07 19:21:25 crc kubenswrapper[4813]: I1007 19:21:25.954791 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shzbz" event={"ID":"7a130b1a-2820-4e77-9a0a-80101a7eed1a","Type":"ContainerDied","Data":"9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729"} Oct 07 19:21:26 crc kubenswrapper[4813]: I1007 19:21:26.963210 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7szgp" event={"ID":"b0115c94-6b20-40f6-9507-6997ea307ad4","Type":"ContainerStarted","Data":"030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344"} Oct 07 19:21:26 crc kubenswrapper[4813]: I1007 19:21:26.965421 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shzbz" event={"ID":"7a130b1a-2820-4e77-9a0a-80101a7eed1a","Type":"ContainerStarted","Data":"ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42"} Oct 07 19:21:26 crc kubenswrapper[4813]: I1007 19:21:26.967852 4813 generic.go:334] "Generic (PLEG): container finished" podID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerID="0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb" exitCode=0 Oct 07 19:21:26 crc kubenswrapper[4813]: I1007 19:21:26.967928 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76dql" event={"ID":"0caa8b2c-1ed6-4162-856a-1d08c578cdd8","Type":"ContainerDied","Data":"0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb"} Oct 07 19:21:26 crc kubenswrapper[4813]: I1007 19:21:26.970594 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6r8l" event={"ID":"391edf07-0597-4236-80a3-1a572239f351","Type":"ContainerStarted","Data":"8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9"} Oct 07 19:21:26 crc kubenswrapper[4813]: I1007 19:21:26.976074 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcrjl" 
event={"ID":"d21c85fa-7b58-4d49-84b5-caa9769bcaed","Type":"ContainerStarted","Data":"039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168"} Oct 07 19:21:26 crc kubenswrapper[4813]: I1007 19:21:26.978265 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgcn7" event={"ID":"4d43fc22-f0c1-46f7-bf20-1245eac2b00f","Type":"ContainerStarted","Data":"983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666"} Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.116141 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-shzbz" podStartSLOduration=3.664169173 podStartE2EDuration="1m10.116122329s" podCreationTimestamp="2025-10-07 19:20:17 +0000 UTC" firstStartedPulling="2025-10-07 19:20:20.160610646 +0000 UTC m=+146.238866257" lastFinishedPulling="2025-10-07 19:21:26.612563792 +0000 UTC m=+212.690819413" observedRunningTime="2025-10-07 19:21:27.114232055 +0000 UTC m=+213.192487666" watchObservedRunningTime="2025-10-07 19:21:27.116122329 +0000 UTC m=+213.194377940" Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.943616 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-shzbz" Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.943885 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-shzbz" Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.986948 4813 generic.go:334] "Generic (PLEG): container finished" podID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerID="030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344" exitCode=0 Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.987087 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7szgp" event={"ID":"b0115c94-6b20-40f6-9507-6997ea307ad4","Type":"ContainerDied","Data":"030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344"} Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.989844 4813 generic.go:334] "Generic (PLEG): container finished" podID="391edf07-0597-4236-80a3-1a572239f351" containerID="8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9" exitCode=0 Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.989896 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6r8l" event={"ID":"391edf07-0597-4236-80a3-1a572239f351","Type":"ContainerDied","Data":"8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9"} Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.996729 4813 generic.go:334] "Generic (PLEG): container finished" podID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerID="039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168" exitCode=0 Oct 07 19:21:27 crc kubenswrapper[4813]: I1007 19:21:27.996819 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcrjl" event={"ID":"d21c85fa-7b58-4d49-84b5-caa9769bcaed","Type":"ContainerDied","Data":"039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168"} Oct 07 19:21:28 crc kubenswrapper[4813]: I1007 19:21:28.000810 4813 generic.go:334] "Generic (PLEG): container finished" podID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerID="983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666" exitCode=0 Oct 07 19:21:28 crc kubenswrapper[4813]: I1007 19:21:28.001692 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgcn7" event={"ID":"4d43fc22-f0c1-46f7-bf20-1245eac2b00f","Type":"ContainerDied","Data":"983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666"} Oct 07 19:21:28 crc kubenswrapper[4813]: I1007 19:21:28.967194 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-5t4w8" Oct 07 19:21:29 crc kubenswrapper[4813]: I1007 19:21:29.660134 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-shzbz" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="registry-server" probeResult="failure" output=< Oct 07 19:21:29 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:21:29 crc kubenswrapper[4813]: > Oct 07 19:21:31 crc kubenswrapper[4813]: I1007 19:21:31.036109 4813 generic.go:334] "Generic (PLEG): container finished" podID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerID="79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46" exitCode=0 Oct 07 19:21:31 crc kubenswrapper[4813]: I1007 19:21:31.036256 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-78vxl" event={"ID":"bd6bd646-c9f5-493f-8301-817d018a8f00","Type":"ContainerDied","Data":"79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46"} Oct 07 19:21:32 crc kubenswrapper[4813]: I1007 19:21:32.042581 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76dql" event={"ID":"0caa8b2c-1ed6-4162-856a-1d08c578cdd8","Type":"ContainerStarted","Data":"36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86"} Oct 07 19:21:32 crc kubenswrapper[4813]: I1007 19:21:32.063445 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-76dql" podStartSLOduration=3.365406097 podStartE2EDuration="1m13.063406059s" podCreationTimestamp="2025-10-07 19:20:19 +0000 UTC" firstStartedPulling="2025-10-07 19:20:21.370284734 +0000 UTC m=+147.448540335" lastFinishedPulling="2025-10-07 19:21:31.068284676 +0000 UTC m=+217.146540297" observedRunningTime="2025-10-07 19:21:32.059428557 +0000 UTC m=+218.137684168" watchObservedRunningTime="2025-10-07 19:21:32.063406059 +0000 UTC m=+218.141661670" Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.050126 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-78vxl" event={"ID":"bd6bd646-c9f5-493f-8301-817d018a8f00","Type":"ContainerStarted","Data":"db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92"} Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.051975 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgcn7" event={"ID":"4d43fc22-f0c1-46f7-bf20-1245eac2b00f","Type":"ContainerStarted","Data":"70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7"} Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.053820 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7szgp" event={"ID":"b0115c94-6b20-40f6-9507-6997ea307ad4","Type":"ContainerStarted","Data":"5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5"} Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.055295 4813 generic.go:334] "Generic (PLEG): container finished" podID="94ef8271-5185-462a-97b7-f33732ca1af4" 
containerID="f0d5f8276d9536d28e2c55b63328c2bb3aef738843e88898fe498a46f73d9bd5" exitCode=0 Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.055354 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zbhw" event={"ID":"94ef8271-5185-462a-97b7-f33732ca1af4","Type":"ContainerDied","Data":"f0d5f8276d9536d28e2c55b63328c2bb3aef738843e88898fe498a46f73d9bd5"} Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.058618 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6r8l" event={"ID":"391edf07-0597-4236-80a3-1a572239f351","Type":"ContainerStarted","Data":"37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662"} Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.060558 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcrjl" event={"ID":"d21c85fa-7b58-4d49-84b5-caa9769bcaed","Type":"ContainerStarted","Data":"714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5"} Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.073850 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-78vxl" podStartSLOduration=4.214635797 podStartE2EDuration="1m13.073836915s" podCreationTimestamp="2025-10-07 19:20:20 +0000 UTC" firstStartedPulling="2025-10-07 19:20:23.402461684 +0000 UTC m=+149.480717285" lastFinishedPulling="2025-10-07 19:21:32.261662792 +0000 UTC m=+218.339918403" observedRunningTime="2025-10-07 19:21:33.070562983 +0000 UTC m=+219.148818594" watchObservedRunningTime="2025-10-07 19:21:33.073836915 +0000 UTC m=+219.152092526" Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.090613 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cgcn7" podStartSLOduration=4.611146513 podStartE2EDuration="1m13.090600388s" podCreationTimestamp="2025-10-07 19:20:20 +0000 UTC" firstStartedPulling="2025-10-07 19:20:23.404721118 +0000 UTC m=+149.482976729" lastFinishedPulling="2025-10-07 19:21:31.884174993 +0000 UTC m=+217.962430604" observedRunningTime="2025-10-07 19:21:33.087051178 +0000 UTC m=+219.165306789" watchObservedRunningTime="2025-10-07 19:21:33.090600388 +0000 UTC m=+219.168855999" Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.139182 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7szgp" podStartSLOduration=3.904905024 podStartE2EDuration="1m16.139164088s" podCreationTimestamp="2025-10-07 19:20:17 +0000 UTC" firstStartedPulling="2025-10-07 19:20:20.14655047 +0000 UTC m=+146.224806081" lastFinishedPulling="2025-10-07 19:21:32.380809534 +0000 UTC m=+218.459065145" observedRunningTime="2025-10-07 19:21:33.136509593 +0000 UTC m=+219.214765204" watchObservedRunningTime="2025-10-07 19:21:33.139164088 +0000 UTC m=+219.217419699" Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.155357 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r6r8l" podStartSLOduration=4.510943152 podStartE2EDuration="1m16.155341305s" podCreationTimestamp="2025-10-07 19:20:17 +0000 UTC" firstStartedPulling="2025-10-07 19:20:20.224565711 +0000 UTC m=+146.302821322" lastFinishedPulling="2025-10-07 19:21:31.868963864 +0000 UTC m=+217.947219475" observedRunningTime="2025-10-07 19:21:33.151082435 +0000 UTC m=+219.229338046" watchObservedRunningTime="2025-10-07 
19:21:33.155341305 +0000 UTC m=+219.233596916" Oct 07 19:21:33 crc kubenswrapper[4813]: I1007 19:21:33.170955 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gcrjl" podStartSLOduration=4.174185071 podStartE2EDuration="1m16.170938435s" podCreationTimestamp="2025-10-07 19:20:17 +0000 UTC" firstStartedPulling="2025-10-07 19:20:20.257302354 +0000 UTC m=+146.335557965" lastFinishedPulling="2025-10-07 19:21:32.254055708 +0000 UTC m=+218.332311329" observedRunningTime="2025-10-07 19:21:33.168213558 +0000 UTC m=+219.246469169" watchObservedRunningTime="2025-10-07 19:21:33.170938435 +0000 UTC m=+219.249194046" Oct 07 19:21:34 crc kubenswrapper[4813]: I1007 19:21:34.066799 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zbhw" event={"ID":"94ef8271-5185-462a-97b7-f33732ca1af4","Type":"ContainerStarted","Data":"9465c6769da89802601bb30e598a14fa1bed4a845b49d739885ea240331f6202"} Oct 07 19:21:34 crc kubenswrapper[4813]: I1007 19:21:34.087215 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9zbhw" podStartSLOduration=2.977395981 podStartE2EDuration="1m15.087198354s" podCreationTimestamp="2025-10-07 19:20:19 +0000 UTC" firstStartedPulling="2025-10-07 19:20:21.369495371 +0000 UTC m=+147.447750982" lastFinishedPulling="2025-10-07 19:21:33.479297744 +0000 UTC m=+219.557553355" observedRunningTime="2025-10-07 19:21:34.086665339 +0000 UTC m=+220.164920950" watchObservedRunningTime="2025-10-07 19:21:34.087198354 +0000 UTC m=+220.165453955" Oct 07 19:21:37 crc kubenswrapper[4813]: I1007 19:21:37.065265 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g4swz"] Oct 07 19:21:37 crc kubenswrapper[4813]: I1007 19:21:37.712626 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gcrjl" Oct 07 19:21:37 crc kubenswrapper[4813]: I1007 19:21:37.712682 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gcrjl" Oct 07 19:21:37 crc kubenswrapper[4813]: I1007 19:21:37.786136 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gcrjl" Oct 07 19:21:37 crc kubenswrapper[4813]: I1007 19:21:37.899553 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7szgp" Oct 07 19:21:37 crc kubenswrapper[4813]: I1007 19:21:37.899585 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7szgp" Oct 07 19:21:37 crc kubenswrapper[4813]: I1007 19:21:37.955411 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7szgp" Oct 07 19:21:38 crc kubenswrapper[4813]: I1007 19:21:38.007359 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-shzbz" Oct 07 19:21:38 crc kubenswrapper[4813]: I1007 19:21:38.041859 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-shzbz" Oct 07 19:21:38 crc kubenswrapper[4813]: I1007 19:21:38.125810 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gcrjl" Oct 07 19:21:38 crc 
kubenswrapper[4813]: I1007 19:21:38.140576 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r6r8l" Oct 07 19:21:38 crc kubenswrapper[4813]: I1007 19:21:38.142610 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-r6r8l" Oct 07 19:21:38 crc kubenswrapper[4813]: I1007 19:21:38.144257 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7szgp" Oct 07 19:21:38 crc kubenswrapper[4813]: I1007 19:21:38.185706 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r6r8l" Oct 07 19:21:39 crc kubenswrapper[4813]: I1007 19:21:39.138202 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r6r8l" Oct 07 19:21:39 crc kubenswrapper[4813]: I1007 19:21:39.639590 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:21:39 crc kubenswrapper[4813]: I1007 19:21:39.639810 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:21:39 crc kubenswrapper[4813]: I1007 19:21:39.646046 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r6r8l"] Oct 07 19:21:39 crc kubenswrapper[4813]: I1007 19:21:39.684288 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:21:39 crc kubenswrapper[4813]: I1007 19:21:39.926466 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:21:39 crc kubenswrapper[4813]: I1007 19:21:39.926565 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:21:39 crc kubenswrapper[4813]: I1007 19:21:39.966657 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.131751 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.132525 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.237715 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-shzbz"] Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.238463 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-shzbz" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="registry-server" containerID="cri-o://ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42" gracePeriod=2 Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.581496 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-shzbz" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.714617 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-catalog-content\") pod \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.714679 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ftlb\" (UniqueName: \"kubernetes.io/projected/7a130b1a-2820-4e77-9a0a-80101a7eed1a-kube-api-access-5ftlb\") pod \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.714721 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-utilities\") pod \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\" (UID: \"7a130b1a-2820-4e77-9a0a-80101a7eed1a\") " Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.716180 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-utilities" (OuterVolumeSpecName: "utilities") pod "7a130b1a-2820-4e77-9a0a-80101a7eed1a" (UID: "7a130b1a-2820-4e77-9a0a-80101a7eed1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.737611 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a130b1a-2820-4e77-9a0a-80101a7eed1a-kube-api-access-5ftlb" (OuterVolumeSpecName: "kube-api-access-5ftlb") pod "7a130b1a-2820-4e77-9a0a-80101a7eed1a" (UID: "7a130b1a-2820-4e77-9a0a-80101a7eed1a"). InnerVolumeSpecName "kube-api-access-5ftlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.752483 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.752528 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.760649 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a130b1a-2820-4e77-9a0a-80101a7eed1a" (UID: "7a130b1a-2820-4e77-9a0a-80101a7eed1a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.794033 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.816024 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.816053 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ftlb\" (UniqueName: \"kubernetes.io/projected/7a130b1a-2820-4e77-9a0a-80101a7eed1a-kube-api-access-5ftlb\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:40 crc kubenswrapper[4813]: I1007 19:21:40.816064 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a130b1a-2820-4e77-9a0a-80101a7eed1a-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.104814 4813 generic.go:334] "Generic (PLEG): container finished" podID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerID="ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42" exitCode=0 Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.104911 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shzbz" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.104949 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shzbz" event={"ID":"7a130b1a-2820-4e77-9a0a-80101a7eed1a","Type":"ContainerDied","Data":"ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42"} Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.104976 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shzbz" event={"ID":"7a130b1a-2820-4e77-9a0a-80101a7eed1a","Type":"ContainerDied","Data":"a1bf09d9a7052985e30eb092a7f2e24908d8baa46e8483eba59c432fe79846f3"} Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.104996 4813 scope.go:117] "RemoveContainer" containerID="ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.105251 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r6r8l" podUID="391edf07-0597-4236-80a3-1a572239f351" containerName="registry-server" containerID="cri-o://37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662" gracePeriod=2 Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.124703 4813 scope.go:117] "RemoveContainer" containerID="9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.143810 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cgcn7" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.148031 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-shzbz"] Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.151043 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.151079 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.152077 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-shzbz"] Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.164556 4813 scope.go:117] "RemoveContainer" containerID="84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.187120 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.218695 4813 scope.go:117] "RemoveContainer" containerID="ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42" Oct 07 19:21:41 crc kubenswrapper[4813]: E1007 19:21:41.219097 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42\": container with ID starting with ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42 not found: ID does not exist" containerID="ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.219136 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42"} err="failed to get container status \"ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42\": rpc error: code = NotFound desc = could not find container \"ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42\": container with ID starting with ad8a3426bcdf866b77eb68e4707dc90d9ff1a07c0ea4e7b1618bf78c05f83b42 not found: ID does not exist" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.219163 4813 scope.go:117] "RemoveContainer" containerID="9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729" Oct 07 19:21:41 crc kubenswrapper[4813]: E1007 19:21:41.219477 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729\": container with ID starting with 9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729 not found: ID does not exist" containerID="9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.219505 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729"} err="failed to get container status \"9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729\": rpc error: code = NotFound desc = could not find container \"9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729\": container with ID starting with 9e5257acf96b0c5e709922ea914f4daa8dbde58a701cddb74c1468497660f729 not found: ID does not exist" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.219551 4813 scope.go:117] "RemoveContainer" containerID="84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32" Oct 07 19:21:41 crc kubenswrapper[4813]: E1007 19:21:41.219747 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32\": container with ID starting with 
84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32 not found: ID does not exist" containerID="84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.219773 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32"} err="failed to get container status \"84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32\": rpc error: code = NotFound desc = could not find container \"84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32\": container with ID starting with 84f9f0059e18f6a8128a2ed8ddd14cb1a2da69264f1e4e5c77528e3affd89a32 not found: ID does not exist" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.440093 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r6r8l" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.525530 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-catalog-content\") pod \"391edf07-0597-4236-80a3-1a572239f351\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.525596 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kq2d6\" (UniqueName: \"kubernetes.io/projected/391edf07-0597-4236-80a3-1a572239f351-kube-api-access-kq2d6\") pod \"391edf07-0597-4236-80a3-1a572239f351\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.525725 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-utilities\") pod \"391edf07-0597-4236-80a3-1a572239f351\" (UID: \"391edf07-0597-4236-80a3-1a572239f351\") " Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.526390 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-utilities" (OuterVolumeSpecName: "utilities") pod "391edf07-0597-4236-80a3-1a572239f351" (UID: "391edf07-0597-4236-80a3-1a572239f351"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.552519 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/391edf07-0597-4236-80a3-1a572239f351-kube-api-access-kq2d6" (OuterVolumeSpecName: "kube-api-access-kq2d6") pod "391edf07-0597-4236-80a3-1a572239f351" (UID: "391edf07-0597-4236-80a3-1a572239f351"). InnerVolumeSpecName "kube-api-access-kq2d6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.575259 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "391edf07-0597-4236-80a3-1a572239f351" (UID: "391edf07-0597-4236-80a3-1a572239f351"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.627479 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.627512 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/391edf07-0597-4236-80a3-1a572239f351-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:41 crc kubenswrapper[4813]: I1007 19:21:41.627535 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kq2d6\" (UniqueName: \"kubernetes.io/projected/391edf07-0597-4236-80a3-1a572239f351-kube-api-access-kq2d6\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.118662 4813 generic.go:334] "Generic (PLEG): container finished" podID="391edf07-0597-4236-80a3-1a572239f351" containerID="37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662" exitCode=0 Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.119042 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6r8l" event={"ID":"391edf07-0597-4236-80a3-1a572239f351","Type":"ContainerDied","Data":"37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662"} Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.119098 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r6r8l" event={"ID":"391edf07-0597-4236-80a3-1a572239f351","Type":"ContainerDied","Data":"4e1bd5f7c10b767a1bd6e9ccc4aef1c19e8060520a5920647e0ac18f3aa9c67f"} Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.119117 4813 scope.go:117] "RemoveContainer" containerID="37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.119402 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r6r8l" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.134547 4813 scope.go:117] "RemoveContainer" containerID="8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.148214 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r6r8l"] Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.151677 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r6r8l"] Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.161489 4813 scope.go:117] "RemoveContainer" containerID="1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.167312 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.177518 4813 scope.go:117] "RemoveContainer" containerID="37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662" Oct 07 19:21:42 crc kubenswrapper[4813]: E1007 19:21:42.177890 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662\": container with ID starting with 37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662 not found: ID does not exist" containerID="37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.177922 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662"} err="failed to get container status \"37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662\": rpc error: code = NotFound desc = could not find container \"37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662\": container with ID starting with 37a11d62ef35d4076f0e6778fc6e3e86fe4c845283de3f5d831dc5f6830d3662 not found: ID does not exist" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.177938 4813 scope.go:117] "RemoveContainer" containerID="8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9" Oct 07 19:21:42 crc kubenswrapper[4813]: E1007 19:21:42.178196 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9\": container with ID starting with 8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9 not found: ID does not exist" containerID="8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.178224 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9"} err="failed to get container status \"8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9\": rpc error: code = NotFound desc = could not find container \"8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9\": container with ID starting with 8ed652eadbc190d110fc8cc1e7630a69d568f4381d3d8c1920fb55c6f9d50ee9 not found: ID does not exist" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.178239 4813 scope.go:117] "RemoveContainer" 
containerID="1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e" Oct 07 19:21:42 crc kubenswrapper[4813]: E1007 19:21:42.178516 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e\": container with ID starting with 1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e not found: ID does not exist" containerID="1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.178536 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e"} err="failed to get container status \"1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e\": rpc error: code = NotFound desc = could not find container \"1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e\": container with ID starting with 1a258ed446e9914074f2c7de71ae08be4c9913f77d415581c62f4c5c1670327e not found: ID does not exist" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.608895 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="391edf07-0597-4236-80a3-1a572239f351" path="/var/lib/kubelet/pods/391edf07-0597-4236-80a3-1a572239f351/volumes" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.609464 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" path="/var/lib/kubelet/pods/7a130b1a-2820-4e77-9a0a-80101a7eed1a/volumes" Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.640292 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-76dql"] Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.640515 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-76dql" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerName="registry-server" containerID="cri-o://36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86" gracePeriod=2 Oct 07 19:21:42 crc kubenswrapper[4813]: I1007 19:21:42.999654 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.125622 4813 generic.go:334] "Generic (PLEG): container finished" podID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerID="36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86" exitCode=0 Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.125689 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76dql" event={"ID":"0caa8b2c-1ed6-4162-856a-1d08c578cdd8","Type":"ContainerDied","Data":"36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86"} Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.125708 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-76dql" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.125729 4813 scope.go:117] "RemoveContainer" containerID="36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.125715 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-76dql" event={"ID":"0caa8b2c-1ed6-4162-856a-1d08c578cdd8","Type":"ContainerDied","Data":"1791fdd6535093e8fc031ab204d39a0e473a67795f096d051eaed44871f4d70b"} Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.141695 4813 scope.go:117] "RemoveContainer" containerID="0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.146675 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-utilities\") pod \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.147008 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-catalog-content\") pod \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.147083 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgjgs\" (UniqueName: \"kubernetes.io/projected/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-kube-api-access-dgjgs\") pod \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\" (UID: \"0caa8b2c-1ed6-4162-856a-1d08c578cdd8\") " Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.147209 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-utilities" (OuterVolumeSpecName: "utilities") pod "0caa8b2c-1ed6-4162-856a-1d08c578cdd8" (UID: "0caa8b2c-1ed6-4162-856a-1d08c578cdd8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.147428 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.154593 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-kube-api-access-dgjgs" (OuterVolumeSpecName: "kube-api-access-dgjgs") pod "0caa8b2c-1ed6-4162-856a-1d08c578cdd8" (UID: "0caa8b2c-1ed6-4162-856a-1d08c578cdd8"). InnerVolumeSpecName "kube-api-access-dgjgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.158628 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0caa8b2c-1ed6-4162-856a-1d08c578cdd8" (UID: "0caa8b2c-1ed6-4162-856a-1d08c578cdd8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.160903 4813 scope.go:117] "RemoveContainer" containerID="c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.172032 4813 scope.go:117] "RemoveContainer" containerID="36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86" Oct 07 19:21:43 crc kubenswrapper[4813]: E1007 19:21:43.172523 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86\": container with ID starting with 36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86 not found: ID does not exist" containerID="36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.172557 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86"} err="failed to get container status \"36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86\": rpc error: code = NotFound desc = could not find container \"36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86\": container with ID starting with 36703020dd9241d7adeaf7384a80894bb990e422c71dd2b937ef12b404b92d86 not found: ID does not exist" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.172575 4813 scope.go:117] "RemoveContainer" containerID="0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb" Oct 07 19:21:43 crc kubenswrapper[4813]: E1007 19:21:43.172875 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb\": container with ID starting with 0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb not found: ID does not exist" containerID="0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.172896 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb"} err="failed to get container status \"0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb\": rpc error: code = NotFound desc = could not find container \"0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb\": container with ID starting with 0f22293e890821fc8ba83c25cbaa092c1a09bac4d875d6a7928d53381582f7fb not found: ID does not exist" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.172909 4813 scope.go:117] "RemoveContainer" containerID="c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7" Oct 07 19:21:43 crc kubenswrapper[4813]: E1007 19:21:43.173169 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7\": container with ID starting with c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7 not found: ID does not exist" containerID="c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.173206 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7"} err="failed to get container status \"c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7\": rpc error: code = NotFound desc = could not find container \"c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7\": container with ID starting with c2f450cd7fb3df9dca76d6e90733e8c4e76e5ef44993c3a6ed7d3ac2614f68c7 not found: ID does not exist" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.249022 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.249064 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgjgs\" (UniqueName: \"kubernetes.io/projected/0caa8b2c-1ed6-4162-856a-1d08c578cdd8-kube-api-access-dgjgs\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.453287 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-76dql"] Oct 07 19:21:43 crc kubenswrapper[4813]: I1007 19:21:43.455872 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-76dql"] Oct 07 19:21:44 crc kubenswrapper[4813]: I1007 19:21:44.609260 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" path="/var/lib/kubelet/pods/0caa8b2c-1ed6-4162-856a-1d08c578cdd8/volumes" Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.038685 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-78vxl"] Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.039151 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-78vxl" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerName="registry-server" containerID="cri-o://db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92" gracePeriod=2 Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.367917 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.476567 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-utilities\") pod \"bd6bd646-c9f5-493f-8301-817d018a8f00\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.476733 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spg5j\" (UniqueName: \"kubernetes.io/projected/bd6bd646-c9f5-493f-8301-817d018a8f00-kube-api-access-spg5j\") pod \"bd6bd646-c9f5-493f-8301-817d018a8f00\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.476786 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-catalog-content\") pod \"bd6bd646-c9f5-493f-8301-817d018a8f00\" (UID: \"bd6bd646-c9f5-493f-8301-817d018a8f00\") " Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.477835 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-utilities" (OuterVolumeSpecName: "utilities") pod "bd6bd646-c9f5-493f-8301-817d018a8f00" (UID: "bd6bd646-c9f5-493f-8301-817d018a8f00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.487085 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd6bd646-c9f5-493f-8301-817d018a8f00-kube-api-access-spg5j" (OuterVolumeSpecName: "kube-api-access-spg5j") pod "bd6bd646-c9f5-493f-8301-817d018a8f00" (UID: "bd6bd646-c9f5-493f-8301-817d018a8f00"). InnerVolumeSpecName "kube-api-access-spg5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.553283 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd6bd646-c9f5-493f-8301-817d018a8f00" (UID: "bd6bd646-c9f5-493f-8301-817d018a8f00"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.578191 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.578236 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd6bd646-c9f5-493f-8301-817d018a8f00-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:45 crc kubenswrapper[4813]: I1007 19:21:45.578247 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spg5j\" (UniqueName: \"kubernetes.io/projected/bd6bd646-c9f5-493f-8301-817d018a8f00-kube-api-access-spg5j\") on node \"crc\" DevicePath \"\"" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.144158 4813 generic.go:334] "Generic (PLEG): container finished" podID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerID="db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92" exitCode=0 Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.144205 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-78vxl" event={"ID":"bd6bd646-c9f5-493f-8301-817d018a8f00","Type":"ContainerDied","Data":"db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92"} Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.144235 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-78vxl" event={"ID":"bd6bd646-c9f5-493f-8301-817d018a8f00","Type":"ContainerDied","Data":"63958825e0a92697f58ad48893cab084c7eb9455457163fc46499a3ce5f3bdbf"} Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.144253 4813 scope.go:117] "RemoveContainer" containerID="db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.144320 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-78vxl" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.157276 4813 scope.go:117] "RemoveContainer" containerID="79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.179180 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-78vxl"] Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.181388 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-78vxl"] Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.185896 4813 scope.go:117] "RemoveContainer" containerID="846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.200096 4813 scope.go:117] "RemoveContainer" containerID="db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92" Oct 07 19:21:46 crc kubenswrapper[4813]: E1007 19:21:46.200602 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92\": container with ID starting with db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92 not found: ID does not exist" containerID="db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.200671 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92"} err="failed to get container status \"db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92\": rpc error: code = NotFound desc = could not find container \"db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92\": container with ID starting with db2280507f7abf5c1449f5cf1d8e8126861953664922241045d1c9c9d0b54a92 not found: ID does not exist" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.200801 4813 scope.go:117] "RemoveContainer" containerID="79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46" Oct 07 19:21:46 crc kubenswrapper[4813]: E1007 19:21:46.201453 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46\": container with ID starting with 79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46 not found: ID does not exist" containerID="79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.201500 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46"} err="failed to get container status \"79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46\": rpc error: code = NotFound desc = could not find container \"79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46\": container with ID starting with 79a1eb9fdc0465951d848373340ed6f1cb73d07ea9d9c52f0adb33ebf58bcd46 not found: ID does not exist" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.201517 4813 scope.go:117] "RemoveContainer" containerID="846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143" Oct 07 19:21:46 crc kubenswrapper[4813]: E1007 19:21:46.201941 4813 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143\": container with ID starting with 846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143 not found: ID does not exist" containerID="846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.201991 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143"} err="failed to get container status \"846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143\": rpc error: code = NotFound desc = could not find container \"846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143\": container with ID starting with 846f316d63fcf1fb96f926c5e45dd405bc71d8310a52a1f5f86bc268901b2143 not found: ID does not exist" Oct 07 19:21:46 crc kubenswrapper[4813]: I1007 19:21:46.608101 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" path="/var/lib/kubelet/pods/bd6bd646-c9f5-493f-8301-817d018a8f00/volumes" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.112824 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" podUID="f4cef620-3f83-48c3-9894-ddef3458cfb5" containerName="oauth-openshift" containerID="cri-o://6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4" gracePeriod=15 Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.503190 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.538843 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh"] Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539235 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a10468a-612b-4d62-af42-0a94f36c1997" containerName="pruner" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539245 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a10468a-612b-4d62-af42-0a94f36c1997" containerName="pruner" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539256 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerName="extract-content" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539263 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerName="extract-content" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539272 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerName="extract-utilities" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539277 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerName="extract-utilities" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539284 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="extract-content" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539289 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="extract-content" Oct 07 19:22:02 crc 
kubenswrapper[4813]: E1007 19:22:02.539297 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="extract-utilities" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539302 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="extract-utilities" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539310 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391edf07-0597-4236-80a3-1a572239f351" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539315 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="391edf07-0597-4236-80a3-1a572239f351" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539340 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539346 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539353 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391edf07-0597-4236-80a3-1a572239f351" containerName="extract-content" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539373 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="391edf07-0597-4236-80a3-1a572239f351" containerName="extract-content" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539382 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4cef620-3f83-48c3-9894-ddef3458cfb5" containerName="oauth-openshift" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539388 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4cef620-3f83-48c3-9894-ddef3458cfb5" containerName="oauth-openshift" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539397 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539402 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539411 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539417 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539426 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391edf07-0597-4236-80a3-1a572239f351" containerName="extract-utilities" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539432 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="391edf07-0597-4236-80a3-1a572239f351" containerName="extract-utilities" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539440 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99a37f2e-fef1-47f1-ac60-6504a968ebf8" containerName="collect-profiles" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539446 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="99a37f2e-fef1-47f1-ac60-6504a968ebf8" 
containerName="collect-profiles" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539453 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerName="extract-utilities" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539458 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerName="extract-utilities" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539466 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerName="extract-content" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539471 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerName="extract-content" Oct 07 19:22:02 crc kubenswrapper[4813]: E1007 19:22:02.539479 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d74fb651-4e63-4f47-9183-0a6fa0a9724f" containerName="pruner" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539485 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d74fb651-4e63-4f47-9183-0a6fa0a9724f" containerName="pruner" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539562 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a130b1a-2820-4e77-9a0a-80101a7eed1a" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539573 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="391edf07-0597-4236-80a3-1a572239f351" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539579 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d74fb651-4e63-4f47-9183-0a6fa0a9724f" containerName="pruner" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539588 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd6bd646-c9f5-493f-8301-817d018a8f00" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539597 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="99a37f2e-fef1-47f1-ac60-6504a968ebf8" containerName="collect-profiles" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539603 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a10468a-612b-4d62-af42-0a94f36c1997" containerName="pruner" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539615 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4cef620-3f83-48c3-9894-ddef3458cfb5" containerName="oauth-openshift" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539624 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0caa8b2c-1ed6-4162-856a-1d08c578cdd8" containerName="registry-server" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.539952 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.556701 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh"] Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580451 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq2kn\" (UniqueName: \"kubernetes.io/projected/f4cef620-3f83-48c3-9894-ddef3458cfb5-kube-api-access-cq2kn\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580506 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-idp-0-file-data\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580547 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-session\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580573 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-login\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580599 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-ocp-branding-template\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-serving-cert\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580660 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-service-ca\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580696 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-router-certs\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580722 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-trusted-ca-bundle\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580758 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-provider-selection\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580786 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-error\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580810 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-dir\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580825 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-policies\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.580852 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-cliconfig\") pod \"f4cef620-3f83-48c3-9894-ddef3458cfb5\" (UID: \"f4cef620-3f83-48c3-9894-ddef3458cfb5\") " Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.581911 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.581967 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.584449 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.584896 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.585176 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.604128 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4cef620-3f83-48c3-9894-ddef3458cfb5-kube-api-access-cq2kn" (OuterVolumeSpecName: "kube-api-access-cq2kn") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "kube-api-access-cq2kn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.608845 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.609073 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.621476 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.621630 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.621991 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.622218 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.622376 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.622433 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "f4cef620-3f83-48c3-9894-ddef3458cfb5" (UID: "f4cef620-3f83-48c3-9894-ddef3458cfb5"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682453 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682500 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682518 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxzpc\" (UniqueName: \"kubernetes.io/projected/82059e30-3457-4b66-afe6-3b01594adcaa-kube-api-access-xxzpc\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682537 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682558 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682579 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-audit-policies\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682626 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/82059e30-3457-4b66-afe6-3b01594adcaa-audit-dir\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682648 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: 
\"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682663 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682683 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682699 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682729 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682744 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682760 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682797 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682808 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc 
kubenswrapper[4813]: I1007 19:22:02.682817 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682828 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682838 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682847 4813 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682857 4813 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4cef620-3f83-48c3-9894-ddef3458cfb5-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682865 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682873 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq2kn\" (UniqueName: \"kubernetes.io/projected/f4cef620-3f83-48c3-9894-ddef3458cfb5-kube-api-access-cq2kn\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682882 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682891 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682899 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682908 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.682918 4813 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f4cef620-3f83-48c3-9894-ddef3458cfb5-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:02 crc 
kubenswrapper[4813]: I1007 19:22:02.784216 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.784492 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.784578 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.784655 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.784740 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.784805 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.784871 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.784945 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " 
pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.785019 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.785088 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxzpc\" (UniqueName: \"kubernetes.io/projected/82059e30-3457-4b66-afe6-3b01594adcaa-kube-api-access-xxzpc\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.785152 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.785223 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.785299 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-audit-policies\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.785401 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/82059e30-3457-4b66-afe6-3b01594adcaa-audit-dir\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.785516 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/82059e30-3457-4b66-afe6-3b01594adcaa-audit-dir\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.786984 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.789881 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-service-ca\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.790497 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-router-certs\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.790871 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-audit-policies\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.792743 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.793227 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.793246 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.796211 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-session\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.796733 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-login\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.797204 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.798088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-template-error\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.798685 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/82059e30-3457-4b66-afe6-3b01594adcaa-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.806946 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxzpc\" (UniqueName: \"kubernetes.io/projected/82059e30-3457-4b66-afe6-3b01594adcaa-kube-api-access-xxzpc\") pod \"oauth-openshift-7fb5d9b995-8mwvh\" (UID: \"82059e30-3457-4b66-afe6-3b01594adcaa\") " pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:02 crc kubenswrapper[4813]: I1007 19:22:02.860104 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.244424 4813 generic.go:334] "Generic (PLEG): container finished" podID="f4cef620-3f83-48c3-9894-ddef3458cfb5" containerID="6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4" exitCode=0 Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.244470 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.244476 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" event={"ID":"f4cef620-3f83-48c3-9894-ddef3458cfb5","Type":"ContainerDied","Data":"6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4"} Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.244606 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-g4swz" event={"ID":"f4cef620-3f83-48c3-9894-ddef3458cfb5","Type":"ContainerDied","Data":"306cf5493be259683fa038a5e14f96efca5f1d61b8c5eb3f2a8d5d384febdc75"} Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.244636 4813 scope.go:117] "RemoveContainer" containerID="6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4" Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.262658 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh"] Oct 07 19:22:03 crc kubenswrapper[4813]: W1007 19:22:03.281086 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82059e30_3457_4b66_afe6_3b01594adcaa.slice/crio-8d17d775f0af3c6184e4bc903c566457feccb1c98dde090396da138a93abeda1 WatchSource:0}: Error finding container 8d17d775f0af3c6184e4bc903c566457feccb1c98dde090396da138a93abeda1: Status 404 returned error can't find the container with id 8d17d775f0af3c6184e4bc903c566457feccb1c98dde090396da138a93abeda1 Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.292457 4813 scope.go:117] "RemoveContainer" containerID="6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4" Oct 07 19:22:03 crc kubenswrapper[4813]: E1007 19:22:03.293306 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4\": container with ID starting with 6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4 not found: ID does not exist" containerID="6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4" Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.293350 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4"} err="failed to get container status \"6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4\": rpc error: code = NotFound desc = could not find container \"6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4\": container with ID starting with 6459344f0b7b4f6f2e2e34a2d59cfe4d24f5cfec31ea483d78d8c6c5874c5ef4 not found: ID does not exist" Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.312851 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g4swz"] Oct 07 19:22:03 crc kubenswrapper[4813]: I1007 19:22:03.318444 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-g4swz"] Oct 07 19:22:04 crc kubenswrapper[4813]: I1007 19:22:04.254182 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" 
event={"ID":"82059e30-3457-4b66-afe6-3b01594adcaa","Type":"ContainerStarted","Data":"c37f0903993005bccbce98be1f4d7627e929cbf6b385eadfba6da8c21177e7d9"} Oct 07 19:22:04 crc kubenswrapper[4813]: I1007 19:22:04.254667 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" event={"ID":"82059e30-3457-4b66-afe6-3b01594adcaa","Type":"ContainerStarted","Data":"8d17d775f0af3c6184e4bc903c566457feccb1c98dde090396da138a93abeda1"} Oct 07 19:22:04 crc kubenswrapper[4813]: I1007 19:22:04.254705 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:04 crc kubenswrapper[4813]: I1007 19:22:04.262896 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" Oct 07 19:22:04 crc kubenswrapper[4813]: I1007 19:22:04.285210 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7fb5d9b995-8mwvh" podStartSLOduration=27.285190164 podStartE2EDuration="27.285190164s" podCreationTimestamp="2025-10-07 19:21:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:22:04.28077869 +0000 UTC m=+250.359034341" watchObservedRunningTime="2025-10-07 19:22:04.285190164 +0000 UTC m=+250.363445775" Oct 07 19:22:04 crc kubenswrapper[4813]: I1007 19:22:04.610193 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4cef620-3f83-48c3-9894-ddef3458cfb5" path="/var/lib/kubelet/pods/f4cef620-3f83-48c3-9894-ddef3458cfb5/volumes" Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.528070 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gcrjl"] Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.528950 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gcrjl" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerName="registry-server" containerID="cri-o://714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5" gracePeriod=30 Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.580412 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7szgp"] Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.580901 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7szgp" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerName="registry-server" containerID="cri-o://5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5" gracePeriod=30 Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.583380 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hhlh8"] Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.583583 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" podUID="48819027-dd10-43a8-b2f9-18bbefcc9451" containerName="marketplace-operator" containerID="cri-o://cac4c948f5eec78fb61abb2011836e168d6cb43888db6864d46923b7b6a36cfe" gracePeriod=30 Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.586781 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zbhw"] 
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.587056 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9zbhw" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" containerName="registry-server" containerID="cri-o://9465c6769da89802601bb30e598a14fa1bed4a845b49d739885ea240331f6202" gracePeriod=30
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.592279 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cgcn7"]
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.592355 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-trcm6"]
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.592801 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cgcn7" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerName="registry-server" containerID="cri-o://70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7" gracePeriod=30
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.593864 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.598544 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-trcm6"]
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.754873 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/54507780-d039-4960-b75e-579f3b0aa7f5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.754930 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/54507780-d039-4960-b75e-579f3b0aa7f5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.754955 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2lc4\" (UniqueName: \"kubernetes.io/projected/54507780-d039-4960-b75e-579f3b0aa7f5-kube-api-access-z2lc4\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.857899 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/54507780-d039-4960-b75e-579f3b0aa7f5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.857947 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/54507780-d039-4960-b75e-579f3b0aa7f5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.857972 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2lc4\" (UniqueName: \"kubernetes.io/projected/54507780-d039-4960-b75e-579f3b0aa7f5-kube-api-access-z2lc4\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.859070 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/54507780-d039-4960-b75e-579f3b0aa7f5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.863558 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/54507780-d039-4960-b75e-579f3b0aa7f5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.880035 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2lc4\" (UniqueName: \"kubernetes.io/projected/54507780-d039-4960-b75e-579f3b0aa7f5-kube-api-access-z2lc4\") pod \"marketplace-operator-79b997595-trcm6\" (UID: \"54507780-d039-4960-b75e-579f3b0aa7f5\") " pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:14 crc kubenswrapper[4813]: I1007 19:22:14.973785 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-trcm6"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.077283 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgcn7"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.188667 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.241055 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.264386 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-catalog-content\") pod \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.264452 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-utilities\") pod \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.264487 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5srw\" (UniqueName: \"kubernetes.io/projected/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-kube-api-access-c5srw\") pod \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\" (UID: \"4d43fc22-f0c1-46f7-bf20-1245eac2b00f\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.270900 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-kube-api-access-c5srw" (OuterVolumeSpecName: "kube-api-access-c5srw") pod "4d43fc22-f0c1-46f7-bf20-1245eac2b00f" (UID: "4d43fc22-f0c1-46f7-bf20-1245eac2b00f"). InnerVolumeSpecName "kube-api-access-c5srw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.277235 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-utilities" (OuterVolumeSpecName: "utilities") pod "4d43fc22-f0c1-46f7-bf20-1245eac2b00f" (UID: "4d43fc22-f0c1-46f7-bf20-1245eac2b00f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.320462 4813 generic.go:334] "Generic (PLEG): container finished" podID="48819027-dd10-43a8-b2f9-18bbefcc9451" containerID="cac4c948f5eec78fb61abb2011836e168d6cb43888db6864d46923b7b6a36cfe" exitCode=0
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.320530 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" event={"ID":"48819027-dd10-43a8-b2f9-18bbefcc9451","Type":"ContainerDied","Data":"cac4c948f5eec78fb61abb2011836e168d6cb43888db6864d46923b7b6a36cfe"}
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.329957 4813 generic.go:334] "Generic (PLEG): container finished" podID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerID="714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5" exitCode=0
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.330030 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcrjl" event={"ID":"d21c85fa-7b58-4d49-84b5-caa9769bcaed","Type":"ContainerDied","Data":"714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5"}
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.330079 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gcrjl" event={"ID":"d21c85fa-7b58-4d49-84b5-caa9769bcaed","Type":"ContainerDied","Data":"508d71b4b230d206a7970ba663030522b906535d90cbfa23e943470ff3ab2f2b"}
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.330098 4813 scope.go:117] "RemoveContainer" containerID="714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.330199 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gcrjl"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.345036 4813 generic.go:334] "Generic (PLEG): container finished" podID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerID="70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7" exitCode=0
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.345143 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgcn7" event={"ID":"4d43fc22-f0c1-46f7-bf20-1245eac2b00f","Type":"ContainerDied","Data":"70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7"}
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.345177 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cgcn7" event={"ID":"4d43fc22-f0c1-46f7-bf20-1245eac2b00f","Type":"ContainerDied","Data":"d4dc8ce2a5a7be19f6127ca088d7a9ff4d73b343fd408a56d42428e0c015e106"}
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.345236 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cgcn7"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.357554 4813 generic.go:334] "Generic (PLEG): container finished" podID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerID="5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5" exitCode=0
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.357797 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7szgp" event={"ID":"b0115c94-6b20-40f6-9507-6997ea307ad4","Type":"ContainerDied","Data":"5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5"}
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.357885 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7szgp" event={"ID":"b0115c94-6b20-40f6-9507-6997ea307ad4","Type":"ContainerDied","Data":"da0d609fb6851416cee7b4d0f0a85d19f967f453261c5fb0ab28039fcf82fbbb"}
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.358010 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7szgp"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.363515 4813 scope.go:117] "RemoveContainer" containerID="039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.364959 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppcvg\" (UniqueName: \"kubernetes.io/projected/b0115c94-6b20-40f6-9507-6997ea307ad4-kube-api-access-ppcvg\") pod \"b0115c94-6b20-40f6-9507-6997ea307ad4\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.365002 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-utilities\") pod \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.365047 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdpx2\" (UniqueName: \"kubernetes.io/projected/d21c85fa-7b58-4d49-84b5-caa9769bcaed-kube-api-access-tdpx2\") pod \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.365078 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-utilities\") pod \"b0115c94-6b20-40f6-9507-6997ea307ad4\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.365149 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-catalog-content\") pod \"b0115c94-6b20-40f6-9507-6997ea307ad4\" (UID: \"b0115c94-6b20-40f6-9507-6997ea307ad4\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.365176 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-catalog-content\") pod \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\" (UID: \"d21c85fa-7b58-4d49-84b5-caa9769bcaed\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.367005 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-utilities" (OuterVolumeSpecName: "utilities") pod "d21c85fa-7b58-4d49-84b5-caa9769bcaed" (UID: "d21c85fa-7b58-4d49-84b5-caa9769bcaed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.370619 4813 generic.go:334] "Generic (PLEG): container finished" podID="94ef8271-5185-462a-97b7-f33732ca1af4" containerID="9465c6769da89802601bb30e598a14fa1bed4a845b49d739885ea240331f6202" exitCode=0
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.370658 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zbhw" event={"ID":"94ef8271-5185-462a-97b7-f33732ca1af4","Type":"ContainerDied","Data":"9465c6769da89802601bb30e598a14fa1bed4a845b49d739885ea240331f6202"}
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.374393 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d43fc22-f0c1-46f7-bf20-1245eac2b00f" (UID: "4d43fc22-f0c1-46f7-bf20-1245eac2b00f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.377638 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-utilities" (OuterVolumeSpecName: "utilities") pod "b0115c94-6b20-40f6-9507-6997ea307ad4" (UID: "b0115c94-6b20-40f6-9507-6997ea307ad4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.377838 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d21c85fa-7b58-4d49-84b5-caa9769bcaed-kube-api-access-tdpx2" (OuterVolumeSpecName: "kube-api-access-tdpx2") pod "d21c85fa-7b58-4d49-84b5-caa9769bcaed" (UID: "d21c85fa-7b58-4d49-84b5-caa9769bcaed"). InnerVolumeSpecName "kube-api-access-tdpx2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.379921 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0115c94-6b20-40f6-9507-6997ea307ad4-kube-api-access-ppcvg" (OuterVolumeSpecName: "kube-api-access-ppcvg") pod "b0115c94-6b20-40f6-9507-6997ea307ad4" (UID: "b0115c94-6b20-40f6-9507-6997ea307ad4"). InnerVolumeSpecName "kube-api-access-ppcvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.384061 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdpx2\" (UniqueName: \"kubernetes.io/projected/d21c85fa-7b58-4d49-84b5-caa9769bcaed-kube-api-access-tdpx2\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.384091 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.384102 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.384112 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.384123 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5srw\" (UniqueName: \"kubernetes.io/projected/4d43fc22-f0c1-46f7-bf20-1245eac2b00f-kube-api-access-c5srw\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.384130 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppcvg\" (UniqueName: \"kubernetes.io/projected/b0115c94-6b20-40f6-9507-6997ea307ad4-kube-api-access-ppcvg\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.384138 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.419963 4813 scope.go:117] "RemoveContainer" containerID="896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.430665 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d21c85fa-7b58-4d49-84b5-caa9769bcaed" (UID: "d21c85fa-7b58-4d49-84b5-caa9769bcaed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.440846 4813 scope.go:117] "RemoveContainer" containerID="714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.441665 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5\": container with ID starting with 714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5 not found: ID does not exist" containerID="714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.441707 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5"} err="failed to get container status \"714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5\": rpc error: code = NotFound desc = could not find container \"714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5\": container with ID starting with 714508bf906ac9ae19cea83165f5beab9092d057a15cf42b5aab4edbff37f8b5 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.441736 4813 scope.go:117] "RemoveContainer" containerID="039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.444849 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168\": container with ID starting with 039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168 not found: ID does not exist" containerID="039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.444883 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168"} err="failed to get container status \"039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168\": rpc error: code = NotFound desc = could not find container \"039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168\": container with ID starting with 039c0e891912609c44ace013ec8dc77f6a42e5032b0a01970a544ec721cb2168 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.444904 4813 scope.go:117] "RemoveContainer" containerID="896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.445754 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329\": container with ID starting with 896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329 not found: ID does not exist" containerID="896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.445780 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329"} err="failed to get container status \"896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329\": rpc error: code = NotFound desc = could not find container \"896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329\": container with ID starting with 896a595dc7aa81a64f18f4c4be4c21b2f3e986dd5b83a0a38d9a296ff42e1329 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.445796 4813 scope.go:117] "RemoveContainer" containerID="70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.454973 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0115c94-6b20-40f6-9507-6997ea307ad4" (UID: "b0115c94-6b20-40f6-9507-6997ea307ad4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.469922 4813 scope.go:117] "RemoveContainer" containerID="983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.486423 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0115c94-6b20-40f6-9507-6997ea307ad4-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.486447 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d21c85fa-7b58-4d49-84b5-caa9769bcaed-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.488383 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zbhw"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.490509 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-trcm6"]
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.509235 4813 scope.go:117] "RemoveContainer" containerID="baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.530725 4813 scope.go:117] "RemoveContainer" containerID="70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.531100 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7\": container with ID starting with 70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7 not found: ID does not exist" containerID="70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.531140 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7"} err="failed to get container status \"70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7\": rpc error: code = NotFound desc = could not find container \"70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7\": container with ID starting with 70b4b070a3a64efa4129ebdd7b9851a95f15dc87dc237b39140d46b4e5c0b4b7 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.531172 4813 scope.go:117] "RemoveContainer" containerID="983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.531454 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666\": container with ID starting with 983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666 not found: ID does not exist" containerID="983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.531478 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666"} err="failed to get container status \"983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666\": rpc error: code = NotFound desc = could not find container \"983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666\": container with ID starting with 983d353a1ccdfb2ef908ac9ee2d8460042bfd9e3b70bd3b38022bd91a361d666 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.531499 4813 scope.go:117] "RemoveContainer" containerID="baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.531688 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188\": container with ID starting with baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188 not found: ID does not exist" containerID="baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.531706 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188"} err="failed to get container status \"baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188\": rpc error: code = NotFound desc = could not find container \"baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188\": container with ID starting with baca52a7690893a195283525b3b946839742e73ca71da4d3690f571f1cd87188 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.531718 4813 scope.go:117] "RemoveContainer" containerID="5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.556695 4813 scope.go:117] "RemoveContainer" containerID="030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.587971 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrcc7\" (UniqueName: \"kubernetes.io/projected/94ef8271-5185-462a-97b7-f33732ca1af4-kube-api-access-lrcc7\") pod \"94ef8271-5185-462a-97b7-f33732ca1af4\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.588047 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-catalog-content\") pod \"94ef8271-5185-462a-97b7-f33732ca1af4\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.591542 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94ef8271-5185-462a-97b7-f33732ca1af4-kube-api-access-lrcc7" (OuterVolumeSpecName: "kube-api-access-lrcc7") pod "94ef8271-5185-462a-97b7-f33732ca1af4" (UID: "94ef8271-5185-462a-97b7-f33732ca1af4"). InnerVolumeSpecName "kube-api-access-lrcc7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.601418 4813 scope.go:117] "RemoveContainer" containerID="f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.608789 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94ef8271-5185-462a-97b7-f33732ca1af4" (UID: "94ef8271-5185-462a-97b7-f33732ca1af4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.615200 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.623026 4813 scope.go:117] "RemoveContainer" containerID="5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.624003 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5\": container with ID starting with 5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5 not found: ID does not exist" containerID="5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.624178 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5"} err="failed to get container status \"5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5\": rpc error: code = NotFound desc = could not find container \"5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5\": container with ID starting with 5a23418c9b70eba793f41b576749bf8b22c101d700d785f8555368c9a4413ee5 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.624515 4813 scope.go:117] "RemoveContainer" containerID="030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.646713 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344\": container with ID starting with 030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344 not found: ID does not exist" containerID="030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.646752 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344"} err="failed to get container status \"030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344\": rpc error: code = NotFound desc = could not find container \"030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344\": container with ID starting with 030e60f2d76990fabd390572d0cb90a159447b6477b4ec5ec0fe5b2c8fe35344 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.646775 4813 scope.go:117] "RemoveContainer" containerID="f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30"
Oct 07 19:22:15 crc kubenswrapper[4813]: E1007 19:22:15.646975 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30\": container with ID starting with f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30 not found: ID does not exist" containerID="f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.646989 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30"} err="failed to get container status \"f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30\": rpc error: code = NotFound desc = could not find container \"f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30\": container with ID starting with f9461e7f68e55a336c84eb0160b638e6a56f51f1b9827593ad408abc2c970c30 not found: ID does not exist"
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.685000 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cgcn7"]
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.689593 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-utilities\") pod \"94ef8271-5185-462a-97b7-f33732ca1af4\" (UID: \"94ef8271-5185-462a-97b7-f33732ca1af4\") "
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.689801 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrcc7\" (UniqueName: \"kubernetes.io/projected/94ef8271-5185-462a-97b7-f33732ca1af4-kube-api-access-lrcc7\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.689811 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.689856 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cgcn7"]
Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.690489 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-utilities" (OuterVolumeSpecName: "utilities") pod "94ef8271-5185-462a-97b7-f33732ca1af4" (UID: "94ef8271-5185-462a-97b7-f33732ca1af4"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.707286 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7szgp"] Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.711277 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7szgp"] Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.724882 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gcrjl"] Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.736024 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gcrjl"] Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.790478 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjf4h\" (UniqueName: \"kubernetes.io/projected/48819027-dd10-43a8-b2f9-18bbefcc9451-kube-api-access-gjf4h\") pod \"48819027-dd10-43a8-b2f9-18bbefcc9451\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.790762 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-operator-metrics\") pod \"48819027-dd10-43a8-b2f9-18bbefcc9451\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.790859 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-trusted-ca\") pod \"48819027-dd10-43a8-b2f9-18bbefcc9451\" (UID: \"48819027-dd10-43a8-b2f9-18bbefcc9451\") " Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.791092 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94ef8271-5185-462a-97b7-f33732ca1af4-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.791690 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "48819027-dd10-43a8-b2f9-18bbefcc9451" (UID: "48819027-dd10-43a8-b2f9-18bbefcc9451"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.793372 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "48819027-dd10-43a8-b2f9-18bbefcc9451" (UID: "48819027-dd10-43a8-b2f9-18bbefcc9451"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.793585 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48819027-dd10-43a8-b2f9-18bbefcc9451-kube-api-access-gjf4h" (OuterVolumeSpecName: "kube-api-access-gjf4h") pod "48819027-dd10-43a8-b2f9-18bbefcc9451" (UID: "48819027-dd10-43a8-b2f9-18bbefcc9451"). InnerVolumeSpecName "kube-api-access-gjf4h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.892297 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjf4h\" (UniqueName: \"kubernetes.io/projected/48819027-dd10-43a8-b2f9-18bbefcc9451-kube-api-access-gjf4h\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.892359 4813 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:15 crc kubenswrapper[4813]: I1007 19:22:15.892369 4813 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/48819027-dd10-43a8-b2f9-18bbefcc9451-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219478 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kzf7b"] Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219647 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" containerName="extract-content" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219657 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" containerName="extract-content" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219667 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerName="extract-utilities" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219672 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerName="extract-utilities" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219679 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" containerName="extract-utilities" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219685 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" containerName="extract-utilities" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219694 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerName="extract-utilities" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219700 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerName="extract-utilities" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219708 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219713 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219722 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219727 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219735 4813 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerName="extract-content" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219741 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerName="extract-content" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219749 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48819027-dd10-43a8-b2f9-18bbefcc9451" containerName="marketplace-operator" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219755 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="48819027-dd10-43a8-b2f9-18bbefcc9451" containerName="marketplace-operator" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219763 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219768 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219777 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerName="extract-content" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219782 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerName="extract-content" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219789 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219795 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219801 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerName="extract-utilities" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219806 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerName="extract-utilities" Oct 07 19:22:16 crc kubenswrapper[4813]: E1007 19:22:16.219814 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerName="extract-content" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219819 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerName="extract-content" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219894 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219904 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219911 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219921 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="48819027-dd10-43a8-b2f9-18bbefcc9451" containerName="marketplace-operator" Oct 07 
19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.219929 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" containerName="registry-server" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.220516 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.224709 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.239208 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kzf7b"] Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.301397 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8zhz\" (UniqueName: \"kubernetes.io/projected/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-kube-api-access-c8zhz\") pod \"community-operators-kzf7b\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.301463 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-catalog-content\") pod \"community-operators-kzf7b\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.301508 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-utilities\") pod \"community-operators-kzf7b\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.378109 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zbhw" event={"ID":"94ef8271-5185-462a-97b7-f33732ca1af4","Type":"ContainerDied","Data":"a47d0d8a805306feee0c2cec5a4c310963349e4be2a297cd75930a78ae90bd05"} Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.378149 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zbhw" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.378168 4813 scope.go:117] "RemoveContainer" containerID="9465c6769da89802601bb30e598a14fa1bed4a845b49d739885ea240331f6202" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.379959 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.380163 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hhlh8" event={"ID":"48819027-dd10-43a8-b2f9-18bbefcc9451","Type":"ContainerDied","Data":"807b2342038b4bb72372fa5100dbba31a0f28c0b83283735e20a9b9ed840b6be"} Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.389904 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-trcm6" event={"ID":"54507780-d039-4960-b75e-579f3b0aa7f5","Type":"ContainerStarted","Data":"c633d4e871f5ef31b9f284baa100c7807893cbd068e0e852f90922839773c3b7"} Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.389940 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-trcm6" event={"ID":"54507780-d039-4960-b75e-579f3b0aa7f5","Type":"ContainerStarted","Data":"6df8747fea5f327d59d80f2138f007cab9a93095623008a986b8761ddd3c79a8"} Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.390440 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-trcm6" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.393472 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-trcm6" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.397794 4813 scope.go:117] "RemoveContainer" containerID="f0d5f8276d9536d28e2c55b63328c2bb3aef738843e88898fe498a46f73d9bd5" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.402271 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-utilities\") pod \"community-operators-kzf7b\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.402352 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8zhz\" (UniqueName: \"kubernetes.io/projected/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-kube-api-access-c8zhz\") pod \"community-operators-kzf7b\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.402401 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-catalog-content\") pod \"community-operators-kzf7b\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.403477 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-utilities\") pod \"community-operators-kzf7b\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.404826 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-catalog-content\") pod \"community-operators-kzf7b\" (UID: 
\"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.421450 4813 scope.go:117] "RemoveContainer" containerID="df11a89c5999d55191eb6a166f7f38bbe24d00cd2961b79f9a07fa69661c7046" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.425501 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8zhz\" (UniqueName: \"kubernetes.io/projected/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-kube-api-access-c8zhz\") pod \"community-operators-kzf7b\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.438801 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-trcm6" podStartSLOduration=2.438775908 podStartE2EDuration="2.438775908s" podCreationTimestamp="2025-10-07 19:22:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:22:16.427688795 +0000 UTC m=+262.505944406" watchObservedRunningTime="2025-10-07 19:22:16.438775908 +0000 UTC m=+262.517031519" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.450502 4813 scope.go:117] "RemoveContainer" containerID="cac4c948f5eec78fb61abb2011836e168d6cb43888db6864d46923b7b6a36cfe" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.453037 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hhlh8"] Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.455950 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hhlh8"] Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.479643 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zbhw"] Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.482815 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zbhw"] Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.536559 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.610312 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48819027-dd10-43a8-b2f9-18bbefcc9451" path="/var/lib/kubelet/pods/48819027-dd10-43a8-b2f9-18bbefcc9451/volumes" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.611051 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d43fc22-f0c1-46f7-bf20-1245eac2b00f" path="/var/lib/kubelet/pods/4d43fc22-f0c1-46f7-bf20-1245eac2b00f/volumes" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.612792 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94ef8271-5185-462a-97b7-f33732ca1af4" path="/var/lib/kubelet/pods/94ef8271-5185-462a-97b7-f33732ca1af4/volumes" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.613930 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0115c94-6b20-40f6-9507-6997ea307ad4" path="/var/lib/kubelet/pods/b0115c94-6b20-40f6-9507-6997ea307ad4/volumes" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.614552 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d21c85fa-7b58-4d49-84b5-caa9769bcaed" path="/var/lib/kubelet/pods/d21c85fa-7b58-4d49-84b5-caa9769bcaed/volumes" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.797267 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9pr84"] Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.798191 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.799992 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.805910 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9pr84"] Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.906154 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be8b38bc-131f-4919-91a8-5c761a14a2b1-utilities\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.906195 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sr8b\" (UniqueName: \"kubernetes.io/projected/be8b38bc-131f-4919-91a8-5c761a14a2b1-kube-api-access-4sr8b\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.906224 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be8b38bc-131f-4919-91a8-5c761a14a2b1-catalog-content\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:16 crc kubenswrapper[4813]: I1007 19:22:16.926882 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kzf7b"] Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.006922 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be8b38bc-131f-4919-91a8-5c761a14a2b1-utilities\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.006965 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sr8b\" (UniqueName: \"kubernetes.io/projected/be8b38bc-131f-4919-91a8-5c761a14a2b1-kube-api-access-4sr8b\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.006998 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be8b38bc-131f-4919-91a8-5c761a14a2b1-catalog-content\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.007383 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be8b38bc-131f-4919-91a8-5c761a14a2b1-catalog-content\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.007609 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be8b38bc-131f-4919-91a8-5c761a14a2b1-utilities\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.023656 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sr8b\" (UniqueName: \"kubernetes.io/projected/be8b38bc-131f-4919-91a8-5c761a14a2b1-kube-api-access-4sr8b\") pod \"certified-operators-9pr84\" (UID: \"be8b38bc-131f-4919-91a8-5c761a14a2b1\") " pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.119286 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.351637 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9pr84"] Oct 07 19:22:17 crc kubenswrapper[4813]: W1007 19:22:17.360116 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe8b38bc_131f_4919_91a8_5c761a14a2b1.slice/crio-8802b6bed99153ac3851502d84f4d9518c70338b00c5d2b50c16e270d145507e WatchSource:0}: Error finding container 8802b6bed99153ac3851502d84f4d9518c70338b00c5d2b50c16e270d145507e: Status 404 returned error can't find the container with id 8802b6bed99153ac3851502d84f4d9518c70338b00c5d2b50c16e270d145507e Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.410045 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pr84" event={"ID":"be8b38bc-131f-4919-91a8-5c761a14a2b1","Type":"ContainerStarted","Data":"8802b6bed99153ac3851502d84f4d9518c70338b00c5d2b50c16e270d145507e"} Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.412703 4813 generic.go:334] "Generic (PLEG): container finished" podID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerID="4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda" exitCode=0 Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.413796 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kzf7b" event={"ID":"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0","Type":"ContainerDied","Data":"4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda"} Oct 07 19:22:17 crc kubenswrapper[4813]: I1007 19:22:17.413906 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kzf7b" event={"ID":"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0","Type":"ContainerStarted","Data":"32a6fab9b90e8136256e5e8ddb9d9834d332d36eae71dd9e9a14c4b1161c7b61"} Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.433147 4813 generic.go:334] "Generic (PLEG): container finished" podID="be8b38bc-131f-4919-91a8-5c761a14a2b1" containerID="daf60769bb5058498bf6b58f54142ddbb7a010a3033b3b74840c53371a40e6d0" exitCode=0 Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.433197 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pr84" event={"ID":"be8b38bc-131f-4919-91a8-5c761a14a2b1","Type":"ContainerDied","Data":"daf60769bb5058498bf6b58f54142ddbb7a010a3033b3b74840c53371a40e6d0"} Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.608064 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q5n28"] Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.608972 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.615494 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.664879 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5n28"] Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.729889 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d79cb01a-80ee-46db-93e3-c53740304297-catalog-content\") pod \"redhat-marketplace-q5n28\" (UID: \"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.729986 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24nxz\" (UniqueName: \"kubernetes.io/projected/d79cb01a-80ee-46db-93e3-c53740304297-kube-api-access-24nxz\") pod \"redhat-marketplace-q5n28\" (UID: \"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.730042 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d79cb01a-80ee-46db-93e3-c53740304297-utilities\") pod \"redhat-marketplace-q5n28\" (UID: \"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.832270 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d79cb01a-80ee-46db-93e3-c53740304297-catalog-content\") pod \"redhat-marketplace-q5n28\" (UID: \"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.832335 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24nxz\" (UniqueName: \"kubernetes.io/projected/d79cb01a-80ee-46db-93e3-c53740304297-kube-api-access-24nxz\") pod \"redhat-marketplace-q5n28\" (UID: \"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.832390 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d79cb01a-80ee-46db-93e3-c53740304297-utilities\") pod \"redhat-marketplace-q5n28\" (UID: \"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.832869 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d79cb01a-80ee-46db-93e3-c53740304297-utilities\") pod \"redhat-marketplace-q5n28\" (UID: \"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.832931 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d79cb01a-80ee-46db-93e3-c53740304297-catalog-content\") pod \"redhat-marketplace-q5n28\" (UID: 
\"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.853686 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24nxz\" (UniqueName: \"kubernetes.io/projected/d79cb01a-80ee-46db-93e3-c53740304297-kube-api-access-24nxz\") pod \"redhat-marketplace-q5n28\" (UID: \"d79cb01a-80ee-46db-93e3-c53740304297\") " pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:18 crc kubenswrapper[4813]: I1007 19:22:18.925315 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.119261 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q5n28"] Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.206147 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w6v68"] Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.207518 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.211780 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w6v68"] Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.212905 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.236269 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3588ed4b-20d8-4233-8542-27542f2bb5e4-utilities\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.236296 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3588ed4b-20d8-4233-8542-27542f2bb5e4-catalog-content\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.236347 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk72r\" (UniqueName: \"kubernetes.io/projected/3588ed4b-20d8-4233-8542-27542f2bb5e4-kube-api-access-pk72r\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.337191 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk72r\" (UniqueName: \"kubernetes.io/projected/3588ed4b-20d8-4233-8542-27542f2bb5e4-kube-api-access-pk72r\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.337295 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3588ed4b-20d8-4233-8542-27542f2bb5e4-utilities\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " 
pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.337509 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3588ed4b-20d8-4233-8542-27542f2bb5e4-catalog-content\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.337797 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3588ed4b-20d8-4233-8542-27542f2bb5e4-utilities\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.337843 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3588ed4b-20d8-4233-8542-27542f2bb5e4-catalog-content\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.355298 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk72r\" (UniqueName: \"kubernetes.io/projected/3588ed4b-20d8-4233-8542-27542f2bb5e4-kube-api-access-pk72r\") pod \"redhat-operators-w6v68\" (UID: \"3588ed4b-20d8-4233-8542-27542f2bb5e4\") " pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.439773 4813 generic.go:334] "Generic (PLEG): container finished" podID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerID="6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e" exitCode=0 Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.439830 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kzf7b" event={"ID":"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0","Type":"ContainerDied","Data":"6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e"} Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.440794 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5n28" event={"ID":"d79cb01a-80ee-46db-93e3-c53740304297","Type":"ContainerStarted","Data":"7cccc1dfdf6e1262d6f46f199327fb3f23a690d03f6def8b26239551069140ea"} Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.541712 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:19 crc kubenswrapper[4813]: I1007 19:22:19.938812 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w6v68"] Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.446543 4813 generic.go:334] "Generic (PLEG): container finished" podID="3588ed4b-20d8-4233-8542-27542f2bb5e4" containerID="8420b26684c1c763ca028461c37890d90d407dd4f29f108087edb386be610a1e" exitCode=0 Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.447121 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6v68" event={"ID":"3588ed4b-20d8-4233-8542-27542f2bb5e4","Type":"ContainerDied","Data":"8420b26684c1c763ca028461c37890d90d407dd4f29f108087edb386be610a1e"} Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.447148 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6v68" event={"ID":"3588ed4b-20d8-4233-8542-27542f2bb5e4","Type":"ContainerStarted","Data":"a4bfe4485d77ba4c43406b667487ef6b80f793c279d187065bef5859fc01d49e"} Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.451825 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kzf7b" event={"ID":"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0","Type":"ContainerStarted","Data":"2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671"} Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.453541 4813 generic.go:334] "Generic (PLEG): container finished" podID="d79cb01a-80ee-46db-93e3-c53740304297" containerID="105caaa3e5a24e29adb28cab43d900d5d3016a97798977b25f0a0fc1edbecfe4" exitCode=0 Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.453596 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5n28" event={"ID":"d79cb01a-80ee-46db-93e3-c53740304297","Type":"ContainerDied","Data":"105caaa3e5a24e29adb28cab43d900d5d3016a97798977b25f0a0fc1edbecfe4"} Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.455461 4813 generic.go:334] "Generic (PLEG): container finished" podID="be8b38bc-131f-4919-91a8-5c761a14a2b1" containerID="5a784cef8837e0c71cc4c46d2d1df25998b732491528ef7bcc95c73462348103" exitCode=0 Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.455487 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pr84" event={"ID":"be8b38bc-131f-4919-91a8-5c761a14a2b1","Type":"ContainerDied","Data":"5a784cef8837e0c71cc4c46d2d1df25998b732491528ef7bcc95c73462348103"} Oct 07 19:22:20 crc kubenswrapper[4813]: I1007 19:22:20.478611 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kzf7b" podStartSLOduration=1.6960119869999999 podStartE2EDuration="4.478595058s" podCreationTimestamp="2025-10-07 19:22:16 +0000 UTC" firstStartedPulling="2025-10-07 19:22:17.414410382 +0000 UTC m=+263.492665993" lastFinishedPulling="2025-10-07 19:22:20.196993463 +0000 UTC m=+266.275249064" observedRunningTime="2025-10-07 19:22:20.476811927 +0000 UTC m=+266.555067538" watchObservedRunningTime="2025-10-07 19:22:20.478595058 +0000 UTC m=+266.556850669" Oct 07 19:22:21 crc kubenswrapper[4813]: I1007 19:22:21.462927 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9pr84" 
event={"ID":"be8b38bc-131f-4919-91a8-5c761a14a2b1","Type":"ContainerStarted","Data":"1366df60756230aad9ab063652e4f2a0e367e9562011662cf4eb8637d7b2bd6c"} Oct 07 19:22:21 crc kubenswrapper[4813]: I1007 19:22:21.466421 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6v68" event={"ID":"3588ed4b-20d8-4233-8542-27542f2bb5e4","Type":"ContainerStarted","Data":"4acc0e3360aec533425d38ed3a6e3ba219451b464f3d6494dd93a9206b6ea2c4"} Oct 07 19:22:21 crc kubenswrapper[4813]: I1007 19:22:21.468214 4813 generic.go:334] "Generic (PLEG): container finished" podID="d79cb01a-80ee-46db-93e3-c53740304297" containerID="27a2efc83771800016688a0cfe6ea139486a9e0cb6149a24638f07007c6bd4cf" exitCode=0 Oct 07 19:22:21 crc kubenswrapper[4813]: I1007 19:22:21.468285 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5n28" event={"ID":"d79cb01a-80ee-46db-93e3-c53740304297","Type":"ContainerDied","Data":"27a2efc83771800016688a0cfe6ea139486a9e0cb6149a24638f07007c6bd4cf"} Oct 07 19:22:21 crc kubenswrapper[4813]: I1007 19:22:21.479830 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9pr84" podStartSLOduration=3.069256578 podStartE2EDuration="5.479814884s" podCreationTimestamp="2025-10-07 19:22:16 +0000 UTC" firstStartedPulling="2025-10-07 19:22:18.435584151 +0000 UTC m=+264.513839762" lastFinishedPulling="2025-10-07 19:22:20.846142457 +0000 UTC m=+266.924398068" observedRunningTime="2025-10-07 19:22:21.477848568 +0000 UTC m=+267.556104199" watchObservedRunningTime="2025-10-07 19:22:21.479814884 +0000 UTC m=+267.558070505" Oct 07 19:22:22 crc kubenswrapper[4813]: I1007 19:22:22.473780 4813 generic.go:334] "Generic (PLEG): container finished" podID="3588ed4b-20d8-4233-8542-27542f2bb5e4" containerID="4acc0e3360aec533425d38ed3a6e3ba219451b464f3d6494dd93a9206b6ea2c4" exitCode=0 Oct 07 19:22:22 crc kubenswrapper[4813]: I1007 19:22:22.473882 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6v68" event={"ID":"3588ed4b-20d8-4233-8542-27542f2bb5e4","Type":"ContainerDied","Data":"4acc0e3360aec533425d38ed3a6e3ba219451b464f3d6494dd93a9206b6ea2c4"} Oct 07 19:22:22 crc kubenswrapper[4813]: I1007 19:22:22.477883 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q5n28" event={"ID":"d79cb01a-80ee-46db-93e3-c53740304297","Type":"ContainerStarted","Data":"c5f65c50e2db4b370d5732e0e3e375757ccdaa3888cd5681cd5e58ed7f44c60f"} Oct 07 19:22:22 crc kubenswrapper[4813]: I1007 19:22:22.525195 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q5n28" podStartSLOduration=3.09967572 podStartE2EDuration="4.525173145s" podCreationTimestamp="2025-10-07 19:22:18 +0000 UTC" firstStartedPulling="2025-10-07 19:22:20.454755885 +0000 UTC m=+266.533011496" lastFinishedPulling="2025-10-07 19:22:21.88025331 +0000 UTC m=+267.958508921" observedRunningTime="2025-10-07 19:22:22.524399343 +0000 UTC m=+268.602654954" watchObservedRunningTime="2025-10-07 19:22:22.525173145 +0000 UTC m=+268.603428756" Oct 07 19:22:24 crc kubenswrapper[4813]: I1007 19:22:24.488590 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w6v68" event={"ID":"3588ed4b-20d8-4233-8542-27542f2bb5e4","Type":"ContainerStarted","Data":"3f132b04bd5fbe4a5275cb2578151058c76af65591c87d2e60c1a1e9b3148ca8"} Oct 07 19:22:24 crc 
kubenswrapper[4813]: I1007 19:22:24.505224 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w6v68" podStartSLOduration=2.597739781 podStartE2EDuration="5.505207295s" podCreationTimestamp="2025-10-07 19:22:19 +0000 UTC" firstStartedPulling="2025-10-07 19:22:20.448218281 +0000 UTC m=+266.526473892" lastFinishedPulling="2025-10-07 19:22:23.355685795 +0000 UTC m=+269.433941406" observedRunningTime="2025-10-07 19:22:24.50503596 +0000 UTC m=+270.583291601" watchObservedRunningTime="2025-10-07 19:22:24.505207295 +0000 UTC m=+270.583462906" Oct 07 19:22:26 crc kubenswrapper[4813]: I1007 19:22:26.537341 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:26 crc kubenswrapper[4813]: I1007 19:22:26.538413 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:26 crc kubenswrapper[4813]: I1007 19:22:26.579574 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:27 crc kubenswrapper[4813]: I1007 19:22:27.119517 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:27 crc kubenswrapper[4813]: I1007 19:22:27.119871 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:27 crc kubenswrapper[4813]: I1007 19:22:27.156911 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:27 crc kubenswrapper[4813]: I1007 19:22:27.538497 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9pr84" Oct 07 19:22:27 crc kubenswrapper[4813]: I1007 19:22:27.549618 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:22:28 crc kubenswrapper[4813]: I1007 19:22:28.925774 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:28 crc kubenswrapper[4813]: I1007 19:22:28.926127 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:28 crc kubenswrapper[4813]: I1007 19:22:28.986130 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:29 crc kubenswrapper[4813]: I1007 19:22:29.542875 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:29 crc kubenswrapper[4813]: I1007 19:22:29.542915 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:29 crc kubenswrapper[4813]: I1007 19:22:29.559314 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q5n28" Oct 07 19:22:29 crc kubenswrapper[4813]: I1007 19:22:29.606999 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:22:30 crc kubenswrapper[4813]: I1007 19:22:30.581802 4813 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w6v68" Oct 07 19:23:52 crc kubenswrapper[4813]: I1007 19:23:52.078938 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:23:52 crc kubenswrapper[4813]: I1007 19:23:52.079589 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:24:22 crc kubenswrapper[4813]: I1007 19:24:22.078309 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:24:22 crc kubenswrapper[4813]: I1007 19:24:22.078981 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.078385 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.078920 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.078961 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.079440 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0830cd9c6c944f536089b77bf873249e6b2b285e17b46cc92095ca9afc2e0ff7"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.079487 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://0830cd9c6c944f536089b77bf873249e6b2b285e17b46cc92095ca9afc2e0ff7" gracePeriod=600 Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.506605 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" 
containerID="0830cd9c6c944f536089b77bf873249e6b2b285e17b46cc92095ca9afc2e0ff7" exitCode=0 Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.506681 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"0830cd9c6c944f536089b77bf873249e6b2b285e17b46cc92095ca9afc2e0ff7"} Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.506923 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"b78c84794157bdaf4c6d8429f03a3dc0ddbbcbef98ccb9a89291d17bfc31a4dd"} Oct 07 19:24:52 crc kubenswrapper[4813]: I1007 19:24:52.506946 4813 scope.go:117] "RemoveContainer" containerID="e3b7b4b04700f90e0db1e090b9085ca0f8cac58d32468927ab8082b69289d80d" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.254801 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4xdfs"] Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.257204 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.265045 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4xdfs"] Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.343380 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ca4e7410-afa7-484d-adc6-0a90b18842ea-registry-certificates\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.343421 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-bound-sa-token\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.343441 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gxr6\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-kube-api-access-5gxr6\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.343464 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ca4e7410-afa7-484d-adc6-0a90b18842ea-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.343489 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ca4e7410-afa7-484d-adc6-0a90b18842ea-ca-trust-extracted\") 
pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.343601 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-registry-tls\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.343650 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.343694 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ca4e7410-afa7-484d-adc6-0a90b18842ea-trusted-ca\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.367040 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.444990 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-registry-tls\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.445213 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ca4e7410-afa7-484d-adc6-0a90b18842ea-trusted-ca\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.445354 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ca4e7410-afa7-484d-adc6-0a90b18842ea-registry-certificates\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.445446 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-bound-sa-token\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 
crc kubenswrapper[4813]: I1007 19:25:14.445548 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gxr6\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-kube-api-access-5gxr6\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.445631 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ca4e7410-afa7-484d-adc6-0a90b18842ea-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.445710 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ca4e7410-afa7-484d-adc6-0a90b18842ea-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.446632 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ca4e7410-afa7-484d-adc6-0a90b18842ea-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.446832 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ca4e7410-afa7-484d-adc6-0a90b18842ea-registry-certificates\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.447574 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ca4e7410-afa7-484d-adc6-0a90b18842ea-trusted-ca\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.451463 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ca4e7410-afa7-484d-adc6-0a90b18842ea-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.451890 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-registry-tls\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.468288 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-bound-sa-token\") pod 
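Editor's note: the entries above show the kubelet's two-phase volume handling for the new registry pod: VerifyControllerAttachedVolume first, then MountVolume/SetUp per volume. The volume names and plugin types (configmap, projected, secret, empty-dir, CSI) are all visible in the UniqueName fields. A sketch of that volume set as k8s.io/api/core/v1 values; the ConfigMap/Secret/claim names flagged below are hypothetical, since only the volume names appear in the log:

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        // Volume names taken from the reconciler entries above; registry-tls,
        // bound-sa-token and kube-api-access-5gxr6 are projected volumes and
        // are omitted here for brevity.
        volumes := []corev1.Volume{
            {Name: "registry-certificates", VolumeSource: corev1.VolumeSource{ConfigMap: &corev1.ConfigMapVolumeSource{
                LocalObjectReference: corev1.LocalObjectReference{Name: "image-registry-certificates"}}}}, // hypothetical ConfigMap name
            {Name: "trusted-ca", VolumeSource: corev1.VolumeSource{ConfigMap: &corev1.ConfigMapVolumeSource{
                LocalObjectReference: corev1.LocalObjectReference{Name: "trusted-ca"}}}}, // hypothetical ConfigMap name
            {Name: "installation-pull-secrets", VolumeSource: corev1.VolumeSource{Secret: &corev1.SecretVolumeSource{
                SecretName: "installation-pull-secrets"}}}, // hypothetical Secret name
            {Name: "ca-trust-extracted", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
            {Name: "registry-storage", VolumeSource: corev1.VolumeSource{PersistentVolumeClaim: &corev1.PersistentVolumeClaimVolumeSource{
                ClaimName: "image-registry-storage"}}}, // hypothetical claim; only the bound PV "pvc-657094db-..." appears in the log
        }
        for _, v := range volumes {
            fmt.Println(v.Name)
        }
    }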
\"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.482939 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gxr6\" (UniqueName: \"kubernetes.io/projected/ca4e7410-afa7-484d-adc6-0a90b18842ea-kube-api-access-5gxr6\") pod \"image-registry-66df7c8f76-4xdfs\" (UID: \"ca4e7410-afa7-484d-adc6-0a90b18842ea\") " pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.612138 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:14 crc kubenswrapper[4813]: I1007 19:25:14.814367 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4xdfs"] Oct 07 19:25:15 crc kubenswrapper[4813]: I1007 19:25:15.649049 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" event={"ID":"ca4e7410-afa7-484d-adc6-0a90b18842ea","Type":"ContainerStarted","Data":"802cf1531f516942677ef7541e149a70e9242d14fd46eab66f1f0c6f910254b0"} Oct 07 19:25:15 crc kubenswrapper[4813]: I1007 19:25:15.649372 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" event={"ID":"ca4e7410-afa7-484d-adc6-0a90b18842ea","Type":"ContainerStarted","Data":"cb8955bfd1a18e47994c5d738dd5b3a4073aa52be39e65531f4790e1fbaaf362"} Oct 07 19:25:15 crc kubenswrapper[4813]: I1007 19:25:15.650535 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:15 crc kubenswrapper[4813]: I1007 19:25:15.681266 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" podStartSLOduration=1.681239282 podStartE2EDuration="1.681239282s" podCreationTimestamp="2025-10-07 19:25:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:25:15.676849921 +0000 UTC m=+441.755105572" watchObservedRunningTime="2025-10-07 19:25:15.681239282 +0000 UTC m=+441.759494923" Oct 07 19:25:34 crc kubenswrapper[4813]: I1007 19:25:34.622136 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-4xdfs" Oct 07 19:25:34 crc kubenswrapper[4813]: I1007 19:25:34.725816 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pxbqp"] Oct 07 19:25:59 crc kubenswrapper[4813]: I1007 19:25:59.801628 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" podUID="bea39b1d-02dc-43ee-939b-1849fbd3bedd" containerName="registry" containerID="cri-o://11e3b51615df3e92c1deb763d73f7f8747013526263ae6a525dfb835b0d88d0e" gracePeriod=30 Oct 07 19:25:59 crc kubenswrapper[4813]: I1007 19:25:59.940773 4813 generic.go:334] "Generic (PLEG): container finished" podID="bea39b1d-02dc-43ee-939b-1849fbd3bedd" containerID="11e3b51615df3e92c1deb763d73f7f8747013526263ae6a525dfb835b0d88d0e" exitCode=0 Oct 07 19:25:59 crc kubenswrapper[4813]: I1007 19:25:59.940832 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" event={"ID":"bea39b1d-02dc-43ee-939b-1849fbd3bedd","Type":"ContainerDied","Data":"11e3b51615df3e92c1deb763d73f7f8747013526263ae6a525dfb835b0d88d0e"} Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.214697 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.340389 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-trusted-ca\") pod \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.340456 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bea39b1d-02dc-43ee-939b-1849fbd3bedd-installation-pull-secrets\") pod \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.340482 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-certificates\") pod \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.340611 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.340639 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc8rp\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-kube-api-access-vc8rp\") pod \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.340692 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-tls\") pod \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.340723 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bea39b1d-02dc-43ee-939b-1849fbd3bedd-ca-trust-extracted\") pod \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.340758 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-bound-sa-token\") pod \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\" (UID: \"bea39b1d-02dc-43ee-939b-1849fbd3bedd\") " Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.342090 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-certificates" 
(OuterVolumeSpecName: "registry-certificates") pod "bea39b1d-02dc-43ee-939b-1849fbd3bedd" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.344209 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bea39b1d-02dc-43ee-939b-1849fbd3bedd" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.351665 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bea39b1d-02dc-43ee-939b-1849fbd3bedd" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.356186 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bea39b1d-02dc-43ee-939b-1849fbd3bedd-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "bea39b1d-02dc-43ee-939b-1849fbd3bedd" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.357495 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "bea39b1d-02dc-43ee-939b-1849fbd3bedd" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.358785 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-kube-api-access-vc8rp" (OuterVolumeSpecName: "kube-api-access-vc8rp") pod "bea39b1d-02dc-43ee-939b-1849fbd3bedd" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd"). InnerVolumeSpecName "kube-api-access-vc8rp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.360374 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bea39b1d-02dc-43ee-939b-1849fbd3bedd-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "bea39b1d-02dc-43ee-939b-1849fbd3bedd" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.361177 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "bea39b1d-02dc-43ee-939b-1849fbd3bedd" (UID: "bea39b1d-02dc-43ee-939b-1849fbd3bedd"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.460315 4813 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.460401 4813 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bea39b1d-02dc-43ee-939b-1849fbd3bedd-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.460425 4813 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.460442 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.460464 4813 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bea39b1d-02dc-43ee-939b-1849fbd3bedd-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.460486 4813 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bea39b1d-02dc-43ee-939b-1849fbd3bedd-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.460507 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc8rp\" (UniqueName: \"kubernetes.io/projected/bea39b1d-02dc-43ee-939b-1849fbd3bedd-kube-api-access-vc8rp\") on node \"crc\" DevicePath \"\"" Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.951783 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pxbqp" event={"ID":"bea39b1d-02dc-43ee-939b-1849fbd3bedd","Type":"ContainerDied","Data":"8a5168fcd70c994da3785a7e12791ebd0ae046b5d22b6f7ce7890d11d9825bbc"} Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.951875 4813 util.go:48] "No ready sandbox for pod can be found. 
Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.952135 4813 scope.go:117] "RemoveContainer" containerID="11e3b51615df3e92c1deb763d73f7f8747013526263ae6a525dfb835b0d88d0e"
Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.978833 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pxbqp"]
Oct 07 19:26:00 crc kubenswrapper[4813]: I1007 19:26:00.987690 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pxbqp"]
Oct 07 19:26:02 crc kubenswrapper[4813]: I1007 19:26:02.615735 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bea39b1d-02dc-43ee-939b-1849fbd3bedd" path="/var/lib/kubelet/pods/bea39b1d-02dc-43ee-939b-1849fbd3bedd/volumes"
Oct 07 19:26:52 crc kubenswrapper[4813]: I1007 19:26:52.078742 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 19:26:52 crc kubenswrapper[4813]: I1007 19:26:52.079489 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.549580 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k7vkh"]
Oct 07 19:27:19 crc kubenswrapper[4813]: E1007 19:27:19.550331 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea39b1d-02dc-43ee-939b-1849fbd3bedd" containerName="registry"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.550343 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea39b1d-02dc-43ee-939b-1849fbd3bedd" containerName="registry"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.550429 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bea39b1d-02dc-43ee-939b-1849fbd3bedd" containerName="registry"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.550776 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-k7vkh"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.554916 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.554984 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.555831 4813 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-xbxzt"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.573086 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k7vkh"]
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.579578 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7lwr4"]
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.580186 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-7lwr4"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.581945 4813 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-6bk8g"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.590058 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"]
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.590883 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.593756 4813 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-dbszb"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.602569 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7lwr4"]
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.620176 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"]
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.747205 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dchg5\" (UniqueName: \"kubernetes.io/projected/1c7224a1-2e4f-4cc4-a127-3791d5c68f6b-kube-api-access-dchg5\") pod \"cert-manager-webhook-5655c58dd6-5mt2d\" (UID: \"1c7224a1-2e4f-4cc4-a127-3791d5c68f6b\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.747272 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btn5d\" (UniqueName: \"kubernetes.io/projected/c1b0f2ad-748f-4212-809f-9e5d658608e5-kube-api-access-btn5d\") pod \"cert-manager-5b446d88c5-7lwr4\" (UID: \"c1b0f2ad-748f-4212-809f-9e5d658608e5\") " pod="cert-manager/cert-manager-5b446d88c5-7lwr4"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.747334 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rbpk\" (UniqueName: \"kubernetes.io/projected/13884ec5-d712-4cd6-86d3-b1e6059b5fb7-kube-api-access-8rbpk\") pod \"cert-manager-cainjector-7f985d654d-k7vkh\" (UID: \"13884ec5-d712-4cd6-86d3-b1e6059b5fb7\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k7vkh"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.849099 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btn5d\" (UniqueName: \"kubernetes.io/projected/c1b0f2ad-748f-4212-809f-9e5d658608e5-kube-api-access-btn5d\") pod \"cert-manager-5b446d88c5-7lwr4\" (UID: \"c1b0f2ad-748f-4212-809f-9e5d658608e5\") " pod="cert-manager/cert-manager-5b446d88c5-7lwr4"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.849179 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rbpk\" (UniqueName: \"kubernetes.io/projected/13884ec5-d712-4cd6-86d3-b1e6059b5fb7-kube-api-access-8rbpk\") pod \"cert-manager-cainjector-7f985d654d-k7vkh\" (UID: \"13884ec5-d712-4cd6-86d3-b1e6059b5fb7\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k7vkh"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.849282 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dchg5\" (UniqueName: \"kubernetes.io/projected/1c7224a1-2e4f-4cc4-a127-3791d5c68f6b-kube-api-access-dchg5\") pod \"cert-manager-webhook-5655c58dd6-5mt2d\" (UID: \"1c7224a1-2e4f-4cc4-a127-3791d5c68f6b\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.869084 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rbpk\" (UniqueName: \"kubernetes.io/projected/13884ec5-d712-4cd6-86d3-b1e6059b5fb7-kube-api-access-8rbpk\") pod \"cert-manager-cainjector-7f985d654d-k7vkh\" (UID: \"13884ec5-d712-4cd6-86d3-b1e6059b5fb7\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k7vkh"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.871094 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btn5d\" (UniqueName: \"kubernetes.io/projected/c1b0f2ad-748f-4212-809f-9e5d658608e5-kube-api-access-btn5d\") pod \"cert-manager-5b446d88c5-7lwr4\" (UID: \"c1b0f2ad-748f-4212-809f-9e5d658608e5\") " pod="cert-manager/cert-manager-5b446d88c5-7lwr4"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.875396 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-k7vkh"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.881125 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dchg5\" (UniqueName: \"kubernetes.io/projected/1c7224a1-2e4f-4cc4-a127-3791d5c68f6b-kube-api-access-dchg5\") pod \"cert-manager-webhook-5655c58dd6-5mt2d\" (UID: \"1c7224a1-2e4f-4cc4-a127-3791d5c68f6b\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.897557 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-7lwr4"
Oct 07 19:27:19 crc kubenswrapper[4813]: I1007 19:27:19.911477 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"
Oct 07 19:27:20 crc kubenswrapper[4813]: I1007 19:27:20.099052 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k7vkh"]
Oct 07 19:27:20 crc kubenswrapper[4813]: I1007 19:27:20.120818 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 07 19:27:20 crc kubenswrapper[4813]: I1007 19:27:20.148628 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7lwr4"]
Oct 07 19:27:20 crc kubenswrapper[4813]: W1007 19:27:20.152514 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1b0f2ad_748f_4212_809f_9e5d658608e5.slice/crio-cd3e7a23e4ba3b96c2e2cfc407f8411f9182b8bbe77b81b88d41a22e1d4a1e76 WatchSource:0}: Error finding container cd3e7a23e4ba3b96c2e2cfc407f8411f9182b8bbe77b81b88d41a22e1d4a1e76: Status 404 returned error can't find the container with id cd3e7a23e4ba3b96c2e2cfc407f8411f9182b8bbe77b81b88d41a22e1d4a1e76
Oct 07 19:27:20 crc kubenswrapper[4813]: I1007 19:27:20.172665 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"]
Oct 07 19:27:20 crc kubenswrapper[4813]: I1007 19:27:20.452095 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d" event={"ID":"1c7224a1-2e4f-4cc4-a127-3791d5c68f6b","Type":"ContainerStarted","Data":"e85319198b3782d6c78695110755c007667ec039118790983b7148b5f09cfaa2"}
Oct 07 19:27:20 crc kubenswrapper[4813]: I1007 19:27:20.453063 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-7lwr4" event={"ID":"c1b0f2ad-748f-4212-809f-9e5d658608e5","Type":"ContainerStarted","Data":"cd3e7a23e4ba3b96c2e2cfc407f8411f9182b8bbe77b81b88d41a22e1d4a1e76"}
Oct 07 19:27:20 crc kubenswrapper[4813]: I1007 19:27:20.453998 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-k7vkh" event={"ID":"13884ec5-d712-4cd6-86d3-b1e6059b5fb7","Type":"ContainerStarted","Data":"67e29513293912402a769d6b414e1a310bf2984a8281fb8634ea2a5fcb08922a"}
Oct 07 19:27:22 crc kubenswrapper[4813]: I1007 19:27:22.079301 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 19:27:22 crc kubenswrapper[4813]: I1007 19:27:22.080105 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 19:27:24 crc kubenswrapper[4813]: I1007 19:27:24.485573 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d" event={"ID":"1c7224a1-2e4f-4cc4-a127-3791d5c68f6b","Type":"ContainerStarted","Data":"ec59923388ae4efd6278796dce33bc36899efdf08662091627833a581f69c727"}
Oct 07 19:27:24 crc kubenswrapper[4813]: I1007 19:27:24.486111 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"
Oct 07 19:27:24 crc kubenswrapper[4813]: I1007 19:27:24.486876 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-7lwr4" event={"ID":"c1b0f2ad-748f-4212-809f-9e5d658608e5","Type":"ContainerStarted","Data":"686d96f6f66c5dd2103a0217f7655573e02e7b39a79b2747509c512e9a342e9d"}
Oct 07 19:27:24 crc kubenswrapper[4813]: I1007 19:27:24.488111 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-k7vkh" event={"ID":"13884ec5-d712-4cd6-86d3-b1e6059b5fb7","Type":"ContainerStarted","Data":"608bf4660e9ea8c8bea6f33c29f62c4c973a5d98391c442d12d98a969f821de0"}
Oct 07 19:27:24 crc kubenswrapper[4813]: I1007 19:27:24.502159 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d" podStartSLOduration=2.095683151 podStartE2EDuration="5.502138232s" podCreationTimestamp="2025-10-07 19:27:19 +0000 UTC" firstStartedPulling="2025-10-07 19:27:20.186059958 +0000 UTC m=+566.264315569" lastFinishedPulling="2025-10-07 19:27:23.592515029 +0000 UTC m=+569.670770650" observedRunningTime="2025-10-07 19:27:24.49940767 +0000 UTC m=+570.577663281" watchObservedRunningTime="2025-10-07 19:27:24.502138232 +0000 UTC m=+570.580393843"
Oct 07 19:27:24 crc kubenswrapper[4813]: I1007 19:27:24.517350 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-7lwr4" podStartSLOduration=1.99367578 podStartE2EDuration="5.517313727s" podCreationTimestamp="2025-10-07 19:27:19 +0000 UTC" firstStartedPulling="2025-10-07 19:27:20.154441099 +0000 UTC m=+566.232696710" lastFinishedPulling="2025-10-07 19:27:23.678079046 +0000 UTC m=+569.756334657" observedRunningTime="2025-10-07 19:27:24.514014038 +0000 UTC m=+570.592269649" watchObservedRunningTime="2025-10-07 19:27:24.517313727 +0000 UTC m=+570.595569338"
Oct 07 19:27:24 crc kubenswrapper[4813]: I1007 19:27:24.530604 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-k7vkh" podStartSLOduration=2.059830826 podStartE2EDuration="5.530580885s" podCreationTimestamp="2025-10-07 19:27:19 +0000 UTC" firstStartedPulling="2025-10-07 19:27:20.120540503 +0000 UTC m=+566.198796114" lastFinishedPulling="2025-10-07 19:27:23.591290552 +0000 UTC m=+569.669546173" observedRunningTime="2025-10-07 19:27:24.528107701 +0000 UTC m=+570.606363312" watchObservedRunningTime="2025-10-07 19:27:24.530580885 +0000 UTC m=+570.608836496"
Oct 07 19:27:29 crc kubenswrapper[4813]: I1007 19:27:29.915494 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-5mt2d"
Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.253035 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vvpdd"]
Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.253478 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovn-controller" containerID="cri-o://839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d" gracePeriod=30
Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.253517 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="nbdb" containerID="cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df" gracePeriod=30
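Editor's note: in the latency-tracker entries above, podStartSLOduration is podStartE2EDuration minus the image-pull window (lastFinishedPulling − firstStartedPulling); the SLO figure excludes pull time. A quick check in Go against the cert-manager-webhook entry, using its monotonic m=+ offsets:

    package main

    import "fmt"

    func main() {
        const (
            e2e                 = 5.502138232   // podStartE2EDuration, seconds
            firstStartedPulling = 566.264315569 // m=+ offset, seconds
            lastFinishedPulling = 569.670770650 // m=+ offset, seconds
        )
        slo := e2e - (lastFinishedPulling - firstStartedPulling)
        fmt.Printf("podStartSLOduration = %.9f s\n", slo) // 2.095683151, matching the log
    }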
containerID="cri-o://d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df" gracePeriod=30 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.253653 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="sbdb" containerID="cri-o://bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8" gracePeriod=30 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.253675 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kube-rbac-proxy-node" containerID="cri-o://5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53" gracePeriod=30 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.253669 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="northd" containerID="cri-o://515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed" gracePeriod=30 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.253746 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovn-acl-logging" containerID="cri-o://f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85" gracePeriod=30 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.254210 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628" gracePeriod=30 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.289173 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" containerID="cri-o://352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" gracePeriod=30 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.538951 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovnkube-controller/3.log" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.540374 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovn-acl-logging/0.log" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.540745 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovn-controller/0.log" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541011 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" exitCode=0 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541034 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed" exitCode=0 Oct 07 19:27:30 crc 
kubenswrapper[4813]: I1007 19:27:30.541043 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628" exitCode=0 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541051 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53" exitCode=0 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541057 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85" exitCode=143 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541065 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d" exitCode=143 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541095 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9"} Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541117 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed"} Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541126 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628"} Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541134 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53"} Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541142 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85"} Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541150 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d"} Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.541163 4813 scope.go:117] "RemoveContainer" containerID="6b7f6a41e39a7f37b48ccaeaa6b2b2ca58460c8b13c67a269eb7a3b08d2b1202" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.543166 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/2.log" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.543681 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/1.log" Oct 07 19:27:30 crc 
kubenswrapper[4813]: I1007 19:27:30.543704 4813 generic.go:334] "Generic (PLEG): container finished" podID="76e24ee5-81b1-4538-aca5-141e399e32e9" containerID="ea12b0917e0500eb874905c967fd2d8059d5c17db195a77e9f1face4a7c3548c" exitCode=2 Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.543718 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbxzg" event={"ID":"76e24ee5-81b1-4538-aca5-141e399e32e9","Type":"ContainerDied","Data":"ea12b0917e0500eb874905c967fd2d8059d5c17db195a77e9f1face4a7c3548c"} Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.544059 4813 scope.go:117] "RemoveContainer" containerID="ea12b0917e0500eb874905c967fd2d8059d5c17db195a77e9f1face4a7c3548c" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.544240 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-gbxzg_openshift-multus(76e24ee5-81b1-4538-aca5-141e399e32e9)\"" pod="openshift-multus/multus-gbxzg" podUID="76e24ee5-81b1-4538-aca5-141e399e32e9" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.544965 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9 is running failed: container process not found" containerID="352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" cmd=["/bin/bash","-c","#!/bin/bash\ntest -f /etc/cni/net.d/10-ovn-kubernetes.conf\n"] Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.545176 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9 is running failed: container process not found" containerID="352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" cmd=["/bin/bash","-c","#!/bin/bash\ntest -f /etc/cni/net.d/10-ovn-kubernetes.conf\n"] Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.545427 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9 is running failed: container process not found" containerID="352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" cmd=["/bin/bash","-c","#!/bin/bash\ntest -f /etc/cni/net.d/10-ovn-kubernetes.conf\n"] Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.545455 4813 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9 is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.590485 4813 scope.go:117] "RemoveContainer" containerID="42c795ce4714a9b8b78b0e1109e00d51c75e32fe845386365ce004ba2f0398b9" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.615942 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovn-acl-logging/0.log" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.616862 4813 
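Editor's note: the failing ExecSync calls above are the ovnkube-controller readiness probe; its command is quoted verbatim in the log, and it errors with NotFound because the container process is already gone during teardown. A sketch of that probe expressed as k8s.io/api/core/v1 types (period/threshold values are not shown in the log and are omitted):

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        readiness := corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                Exec: &corev1.ExecAction{
                    // Exactly the cmd shown in the "ExecSync cmd from runtime service failed" entries.
                    Command: []string{"/bin/bash", "-c", "#!/bin/bash\ntest -f /etc/cni/net.d/10-ovn-kubernetes.conf\n"},
                },
            },
        }
        fmt.Println(readiness.Exec.Command)
    }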
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovn-controller/0.log" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.617489 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.663911 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ctkbx"] Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.664311 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.664464 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.664521 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.664566 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.664616 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.664664 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.664719 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kube-rbac-proxy-node" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.664770 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kube-rbac-proxy-node" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.664821 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="nbdb" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.664887 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="nbdb" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.664951 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665006 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.665058 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="northd" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665109 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="northd" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.665174 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="sbdb" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 
19:27:30.665239 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="sbdb" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.665293 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovn-acl-logging" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665372 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovn-acl-logging" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.665426 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kubecfg-setup" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665477 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kubecfg-setup" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.665529 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovn-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665578 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovn-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665724 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="nbdb" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665788 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665838 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovn-acl-logging" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665892 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="northd" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.665957 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kube-rbac-proxy-node" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666010 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666057 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666110 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="sbdb" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666160 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovn-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666211 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="kube-rbac-proxy-ovn-metrics" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.666384 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 
07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666445 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: E1007 19:27:30.666497 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666548 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666689 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.666748 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerName="ovnkube-controller" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.668290 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.705668 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmnhk\" (UniqueName: \"kubernetes.io/projected/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-kube-api-access-pmnhk\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.705907 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-netd\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706021 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-systemd-units\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.705972 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706092 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706106 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-bin\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706162 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-var-lib-openvswitch\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706189 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-script-lib\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706221 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-netns\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706244 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-systemd\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706264 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-ovn\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706293 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-node-log\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706323 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-openvswitch\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706335 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706368 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-env-overrides\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706395 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-node-log" (OuterVolumeSpecName: "node-log") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706405 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-slash\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706376 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706455 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-config\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706490 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-log-socket\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706521 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706548 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-etc-openvswitch\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706579 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-kubelet\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706606 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-ovn-kubernetes\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706631 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovn-node-metrics-cert\") pod \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\" (UID: \"e3bc364e-ed17-44b8-9942-b41e6b8ac13a\") " Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706420 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706828 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706845 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706851 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706805 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-systemd\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706836 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706442 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-slash" (OuterVolumeSpecName: "host-slash") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706749 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706773 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706821 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706868 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706873 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.706963 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-run-netns\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707058 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovnkube-config\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707167 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-env-overrides\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707241 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-etc-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707292 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-node-log\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707317 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-cni-netd\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707376 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-kubelet\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707406 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-systemd-units\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707438 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-cni-bin\") pod 
\"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707484 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-slash\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707515 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovn-node-metrics-cert\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707550 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-ovn\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707596 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovnkube-script-lib\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707619 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8twc8\" (UniqueName: \"kubernetes.io/projected/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-kube-api-access-8twc8\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707645 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-run-ovn-kubernetes\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707671 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707701 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-log-socket\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707767 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707810 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-var-lib-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707884 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-log-socket" (OuterVolumeSpecName: "log-socket") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.707900 4813 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.708051 4813 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.708208 4813 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.708271 4813 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.708324 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.708399 4813 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.708463 4813 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.708534 4813 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-node-log\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.709561 4813 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-openvswitch\") on node \"crc\" DevicePath \"\"" 
Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.709586 4813 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.709598 4813 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-slash\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.709609 4813 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.709624 4813 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.709639 4813 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.709652 4813 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.709664 4813 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.710803 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.710858 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-kube-api-access-pmnhk" (OuterVolumeSpecName: "kube-api-access-pmnhk") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "kube-api-access-pmnhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.721075 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "e3bc364e-ed17-44b8-9942-b41e6b8ac13a" (UID: "e3bc364e-ed17-44b8-9942-b41e6b8ac13a"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810718 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-systemd\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810775 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-run-netns\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810801 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovnkube-config\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810827 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-env-overrides\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810840 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-run-netns\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810891 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-etc-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810854 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-etc-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810930 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-node-log\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810949 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-cni-netd\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810969 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-kubelet\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810849 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-systemd\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.810985 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-systemd-units\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811010 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-cni-bin\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811019 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-cni-netd\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811047 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-slash\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811081 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-kubelet\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-systemd-units\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811114 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-cni-bin\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811116 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-node-log\") pod 
\"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811028 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-slash\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811184 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovn-node-metrics-cert\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811223 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-ovn\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811249 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8twc8\" (UniqueName: \"kubernetes.io/projected/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-kube-api-access-8twc8\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811275 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovnkube-script-lib\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811320 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-ovn\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811380 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-run-ovn-kubernetes\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811401 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-env-overrides\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811419 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ctkbx\" (UID: 
\"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811476 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-log-socket\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811544 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811579 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-var-lib-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811613 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811641 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-host-run-ovn-kubernetes\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-run-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811814 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-log-socket\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811837 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-var-lib-openvswitch\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811896 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovnkube-script-lib\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811906 4813 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.811999 4813 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-log-socket\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.812014 4813 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.812026 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmnhk\" (UniqueName: \"kubernetes.io/projected/e3bc364e-ed17-44b8-9942-b41e6b8ac13a-kube-api-access-pmnhk\") on node \"crc\" DevicePath \"\"" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.812021 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovnkube-config\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.814203 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-ovn-node-metrics-cert\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.829354 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8twc8\" (UniqueName: \"kubernetes.io/projected/e49d6663-d2b8-4e00-93ac-df1f3c14f9b1-kube-api-access-8twc8\") pod \"ovnkube-node-ctkbx\" (UID: \"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:30 crc kubenswrapper[4813]: I1007 19:27:30.981670 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:31 crc kubenswrapper[4813]: W1007 19:27:31.006679 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode49d6663_d2b8_4e00_93ac_df1f3c14f9b1.slice/crio-a34d22484407b7b5d0080e92e246c47c511fe84283337ececa6cb5f1e966519a WatchSource:0}: Error finding container a34d22484407b7b5d0080e92e246c47c511fe84283337ececa6cb5f1e966519a: Status 404 returned error can't find the container with id a34d22484407b7b5d0080e92e246c47c511fe84283337ececa6cb5f1e966519a Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.551187 4813 generic.go:334] "Generic (PLEG): container finished" podID="e49d6663-d2b8-4e00-93ac-df1f3c14f9b1" containerID="3ab3749f9c5cebe7c81522962e68733347d9ce52671e0a758831f97b9408db5d" exitCode=0 Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.551228 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerDied","Data":"3ab3749f9c5cebe7c81522962e68733347d9ce52671e0a758831f97b9408db5d"} Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.551278 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"a34d22484407b7b5d0080e92e246c47c511fe84283337ececa6cb5f1e966519a"} Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.557982 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovn-acl-logging/0.log" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.558744 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-vvpdd_e3bc364e-ed17-44b8-9942-b41e6b8ac13a/ovn-controller/0.log" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.559420 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8" exitCode=0 Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.559444 4813 generic.go:334] "Generic (PLEG): container finished" podID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" containerID="d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df" exitCode=0 Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.559496 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8"} Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.559522 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df"} Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.559536 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" event={"ID":"e3bc364e-ed17-44b8-9942-b41e6b8ac13a","Type":"ContainerDied","Data":"6c2f49e8f4d1607718890acde42df8569e1a17c6b27b0d95392b1eda28663d48"} Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.559555 4813 scope.go:117] "RemoveContainer" 
containerID="352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.559705 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vvpdd" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.571449 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/2.log" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.608832 4813 scope.go:117] "RemoveContainer" containerID="bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.643450 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vvpdd"] Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.650710 4813 scope.go:117] "RemoveContainer" containerID="d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.659072 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vvpdd"] Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.676728 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3bc364e_ed17_44b8_9942_b41e6b8ac13a.slice\": RecentStats: unable to find data in memory cache]" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.688737 4813 scope.go:117] "RemoveContainer" containerID="515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.711109 4813 scope.go:117] "RemoveContainer" containerID="4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.727582 4813 scope.go:117] "RemoveContainer" containerID="5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.752357 4813 scope.go:117] "RemoveContainer" containerID="f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.768197 4813 scope.go:117] "RemoveContainer" containerID="839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.782636 4813 scope.go:117] "RemoveContainer" containerID="e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.796757 4813 scope.go:117] "RemoveContainer" containerID="352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.797166 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9\": container with ID starting with 352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9 not found: ID does not exist" containerID="352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.797270 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9"} err="failed to get container status 
\"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9\": rpc error: code = NotFound desc = could not find container \"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9\": container with ID starting with 352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.797357 4813 scope.go:117] "RemoveContainer" containerID="bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.797708 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\": container with ID starting with bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8 not found: ID does not exist" containerID="bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.797793 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8"} err="failed to get container status \"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\": rpc error: code = NotFound desc = could not find container \"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\": container with ID starting with bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.797861 4813 scope.go:117] "RemoveContainer" containerID="d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.798195 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\": container with ID starting with d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df not found: ID does not exist" containerID="d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.798242 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df"} err="failed to get container status \"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\": rpc error: code = NotFound desc = could not find container \"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\": container with ID starting with d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.798274 4813 scope.go:117] "RemoveContainer" containerID="515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.798597 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\": container with ID starting with 515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed not found: ID does not exist" containerID="515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.798677 4813 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed"} err="failed to get container status \"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\": rpc error: code = NotFound desc = could not find container \"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\": container with ID starting with 515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.798744 4813 scope.go:117] "RemoveContainer" containerID="4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.799115 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\": container with ID starting with 4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628 not found: ID does not exist" containerID="4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.799196 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628"} err="failed to get container status \"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\": rpc error: code = NotFound desc = could not find container \"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\": container with ID starting with 4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.799264 4813 scope.go:117] "RemoveContainer" containerID="5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.799595 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\": container with ID starting with 5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53 not found: ID does not exist" containerID="5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.799680 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53"} err="failed to get container status \"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\": rpc error: code = NotFound desc = could not find container \"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\": container with ID starting with 5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.799738 4813 scope.go:117] "RemoveContainer" containerID="f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.800058 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\": container with ID starting with f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85 not found: ID does not exist" 
containerID="f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.800130 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85"} err="failed to get container status \"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\": rpc error: code = NotFound desc = could not find container \"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\": container with ID starting with f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.800196 4813 scope.go:117] "RemoveContainer" containerID="839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.800462 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\": container with ID starting with 839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d not found: ID does not exist" containerID="839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.800536 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d"} err="failed to get container status \"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\": rpc error: code = NotFound desc = could not find container \"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\": container with ID starting with 839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.800595 4813 scope.go:117] "RemoveContainer" containerID="e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41" Oct 07 19:27:31 crc kubenswrapper[4813]: E1007 19:27:31.800881 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\": container with ID starting with e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41 not found: ID does not exist" containerID="e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.800959 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41"} err="failed to get container status \"e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\": rpc error: code = NotFound desc = could not find container \"e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\": container with ID starting with e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.801020 4813 scope.go:117] "RemoveContainer" containerID="352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.801445 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9"} err="failed to get container status \"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9\": rpc error: code = NotFound desc = could not find container \"352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9\": container with ID starting with 352033aec3853abf27088da8b6f077b68adc154befff187ed44afeb7a654c2e9 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.801484 4813 scope.go:117] "RemoveContainer" containerID="bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.802141 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8"} err="failed to get container status \"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\": rpc error: code = NotFound desc = could not find container \"bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8\": container with ID starting with bdb2551a82c5a23a538d4c789fa2f99867b126644cb4612f750ca5cd854a79e8 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.802178 4813 scope.go:117] "RemoveContainer" containerID="d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.802428 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df"} err="failed to get container status \"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\": rpc error: code = NotFound desc = could not find container \"d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df\": container with ID starting with d100e800bb2ca7fb6223e7f2527cfc1653d4a99b05f7b235037522104f26a4df not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.802449 4813 scope.go:117] "RemoveContainer" containerID="515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.804167 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed"} err="failed to get container status \"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\": rpc error: code = NotFound desc = could not find container \"515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed\": container with ID starting with 515fcd5a9aa00fc6ce1a610c39310d1f95a8406d377e10612d9ed3a204b45eed not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.804190 4813 scope.go:117] "RemoveContainer" containerID="4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.804454 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628"} err="failed to get container status \"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\": rpc error: code = NotFound desc = could not find container \"4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628\": container with ID starting with 4e827621f6eadd235c801fe2e8a073e9cff8f2d3e5b26abeeff42d14979f4628 not found: ID does not exist" Oct 
07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.804547 4813 scope.go:117] "RemoveContainer" containerID="5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.804833 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53"} err="failed to get container status \"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\": rpc error: code = NotFound desc = could not find container \"5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53\": container with ID starting with 5d856cc4b09834a50a0f4f116f6fbe854685d141fd28efaf6a2369b45622bf53 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.804929 4813 scope.go:117] "RemoveContainer" containerID="f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.805273 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85"} err="failed to get container status \"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\": rpc error: code = NotFound desc = could not find container \"f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85\": container with ID starting with f22e82b4e1dbcf12f01c4465b27e60a4393c0251032007731fdea00f80856a85 not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.805448 4813 scope.go:117] "RemoveContainer" containerID="839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.805719 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d"} err="failed to get container status \"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\": rpc error: code = NotFound desc = could not find container \"839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d\": container with ID starting with 839167b8b4290fd2126fae40ea9a8e6693f445c5df8855fee63c4b6f921b412d not found: ID does not exist" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.806049 4813 scope.go:117] "RemoveContainer" containerID="e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41" Oct 07 19:27:31 crc kubenswrapper[4813]: I1007 19:27:31.806354 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41"} err="failed to get container status \"e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\": rpc error: code = NotFound desc = could not find container \"e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41\": container with ID starting with e47cf6f1d2b164a8bfcc2a8e7b722a5395cebf621c78f691012955bf9164ea41 not found: ID does not exist" Oct 07 19:27:32 crc kubenswrapper[4813]: I1007 19:27:32.580797 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"b334ce1cae446ec20ce45b2c476c313ae5a5e115c3b853ee2471e83fb341f164"} Oct 07 19:27:32 crc kubenswrapper[4813]: I1007 19:27:32.581155 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"ebfcb85e971e1a31d1031805b26e6ee819186c048d2dbf4506e49f0363b22823"} Oct 07 19:27:32 crc kubenswrapper[4813]: I1007 19:27:32.581174 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"0d6c436a0c54934faa125b2ceb2aa5e809195512c05e9c59a1ac66439104e10b"} Oct 07 19:27:32 crc kubenswrapper[4813]: I1007 19:27:32.581186 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"9cc7e6dc24410e7edee776de2de3961e46e42a40c65407d7d42913bca6c03285"} Oct 07 19:27:32 crc kubenswrapper[4813]: I1007 19:27:32.581198 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"789fbf28966814f6b2d3681affde37cecb80ccda215c26120208c49d1a40797b"} Oct 07 19:27:32 crc kubenswrapper[4813]: I1007 19:27:32.581210 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"4479a62cc17e60398712433512de06f8230a75ed139e2e9e7b9348c9a484251f"} Oct 07 19:27:32 crc kubenswrapper[4813]: I1007 19:27:32.609791 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3bc364e-ed17-44b8-9942-b41e6b8ac13a" path="/var/lib/kubelet/pods/e3bc364e-ed17-44b8-9942-b41e6b8ac13a/volumes" Oct 07 19:27:34 crc kubenswrapper[4813]: I1007 19:27:34.597985 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"e1d962353b2774546c03482b61aa36828a61e0cba81e697f951abcc0660a571c"} Oct 07 19:27:37 crc kubenswrapper[4813]: I1007 19:27:37.619558 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" event={"ID":"e49d6663-d2b8-4e00-93ac-df1f3c14f9b1","Type":"ContainerStarted","Data":"8ab0fd9f8523fa2f6100109e70b49ced39b8768f4a433df7f0250df8459daea1"} Oct 07 19:27:37 crc kubenswrapper[4813]: I1007 19:27:37.620475 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:37 crc kubenswrapper[4813]: I1007 19:27:37.620642 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:37 crc kubenswrapper[4813]: I1007 19:27:37.620919 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:37 crc kubenswrapper[4813]: I1007 19:27:37.658663 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" podStartSLOduration=7.658638649 podStartE2EDuration="7.658638649s" podCreationTimestamp="2025-10-07 19:27:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:27:37.656430513 +0000 UTC m=+583.734686144" watchObservedRunningTime="2025-10-07 19:27:37.658638649 +0000 UTC m=+583.736894280" Oct 07 19:27:37 crc kubenswrapper[4813]: I1007 19:27:37.677209 
4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:37 crc kubenswrapper[4813]: I1007 19:27:37.680293 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:27:41 crc kubenswrapper[4813]: I1007 19:27:41.603162 4813 scope.go:117] "RemoveContainer" containerID="ea12b0917e0500eb874905c967fd2d8059d5c17db195a77e9f1face4a7c3548c" Oct 07 19:27:41 crc kubenswrapper[4813]: E1007 19:27:41.604020 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-gbxzg_openshift-multus(76e24ee5-81b1-4538-aca5-141e399e32e9)\"" pod="openshift-multus/multus-gbxzg" podUID="76e24ee5-81b1-4538-aca5-141e399e32e9" Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.079150 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.079740 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.079804 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.080532 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b78c84794157bdaf4c6d8429f03a3dc0ddbbcbef98ccb9a89291d17bfc31a4dd"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.080600 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://b78c84794157bdaf4c6d8429f03a3dc0ddbbcbef98ccb9a89291d17bfc31a4dd" gracePeriod=600 Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.715945 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="b78c84794157bdaf4c6d8429f03a3dc0ddbbcbef98ccb9a89291d17bfc31a4dd" exitCode=0 Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.715993 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"b78c84794157bdaf4c6d8429f03a3dc0ddbbcbef98ccb9a89291d17bfc31a4dd"} Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.716352 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" 
event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"65fe0b5a9444ed388154693078866b82b9f87cf7cbddae0e9656f26066276d1a"} Oct 07 19:27:52 crc kubenswrapper[4813]: I1007 19:27:52.716386 4813 scope.go:117] "RemoveContainer" containerID="0830cd9c6c944f536089b77bf873249e6b2b285e17b46cc92095ca9afc2e0ff7" Oct 07 19:27:56 crc kubenswrapper[4813]: I1007 19:27:56.605059 4813 scope.go:117] "RemoveContainer" containerID="ea12b0917e0500eb874905c967fd2d8059d5c17db195a77e9f1face4a7c3548c" Oct 07 19:27:57 crc kubenswrapper[4813]: I1007 19:27:57.763914 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbxzg_76e24ee5-81b1-4538-aca5-141e399e32e9/kube-multus/2.log" Oct 07 19:27:57 crc kubenswrapper[4813]: I1007 19:27:57.764342 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbxzg" event={"ID":"76e24ee5-81b1-4538-aca5-141e399e32e9","Type":"ContainerStarted","Data":"90cd474e9b3c5222bfe9fc714a46cf42e3e52b95a0b38721b40e21457dd5d528"} Oct 07 19:28:01 crc kubenswrapper[4813]: I1007 19:28:01.016020 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ctkbx" Oct 07 19:28:09 crc kubenswrapper[4813]: I1007 19:28:09.967565 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8"] Oct 07 19:28:09 crc kubenswrapper[4813]: I1007 19:28:09.969411 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:09 crc kubenswrapper[4813]: I1007 19:28:09.971997 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 07 19:28:09 crc kubenswrapper[4813]: I1007 19:28:09.982296 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8"] Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.148211 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.148263 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59tn5\" (UniqueName: \"kubernetes.io/projected/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-kube-api-access-59tn5\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.148305 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 
19:28:10.249284 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.250045 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.250291 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59tn5\" (UniqueName: \"kubernetes.io/projected/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-kube-api-access-59tn5\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.250188 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.250727 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.286758 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59tn5\" (UniqueName: \"kubernetes.io/projected/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-kube-api-access-59tn5\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.584736 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:10 crc kubenswrapper[4813]: I1007 19:28:10.877298 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8"] Oct 07 19:28:11 crc kubenswrapper[4813]: I1007 19:28:11.848797 4813 generic.go:334] "Generic (PLEG): container finished" podID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerID="b3aac490cd69ec3b3e18ad71af28051586ba2af31ad8e8cbbb967c6ec92a7b58" exitCode=0 Oct 07 19:28:11 crc kubenswrapper[4813]: I1007 19:28:11.848903 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" event={"ID":"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2","Type":"ContainerDied","Data":"b3aac490cd69ec3b3e18ad71af28051586ba2af31ad8e8cbbb967c6ec92a7b58"} Oct 07 19:28:11 crc kubenswrapper[4813]: I1007 19:28:11.849140 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" event={"ID":"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2","Type":"ContainerStarted","Data":"f5b8a233e1ce594eed9f01b61bb87984de770550fd919f2b0755ab5a0856ee66"} Oct 07 19:28:13 crc kubenswrapper[4813]: I1007 19:28:13.864173 4813 generic.go:334] "Generic (PLEG): container finished" podID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerID="c2c073879930b4520d0e3d02feb3fd43f65ec07b17b25bd3919dafd78886bebc" exitCode=0 Oct 07 19:28:13 crc kubenswrapper[4813]: I1007 19:28:13.864238 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" event={"ID":"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2","Type":"ContainerDied","Data":"c2c073879930b4520d0e3d02feb3fd43f65ec07b17b25bd3919dafd78886bebc"} Oct 07 19:28:14 crc kubenswrapper[4813]: I1007 19:28:14.874219 4813 generic.go:334] "Generic (PLEG): container finished" podID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerID="2164e187f54b1db126e37d9e5e34886aef491bbb91a6e44a8451e33bf0dca943" exitCode=0 Oct 07 19:28:14 crc kubenswrapper[4813]: I1007 19:28:14.874392 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" event={"ID":"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2","Type":"ContainerDied","Data":"2164e187f54b1db126e37d9e5e34886aef491bbb91a6e44a8451e33bf0dca943"} Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.126851 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.236650 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-util\") pod \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.236706 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59tn5\" (UniqueName: \"kubernetes.io/projected/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-kube-api-access-59tn5\") pod \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.236752 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-bundle\") pod \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\" (UID: \"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2\") " Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.237362 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-bundle" (OuterVolumeSpecName: "bundle") pod "cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" (UID: "cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.244067 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-kube-api-access-59tn5" (OuterVolumeSpecName: "kube-api-access-59tn5") pod "cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" (UID: "cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2"). InnerVolumeSpecName "kube-api-access-59tn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.269137 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-util" (OuterVolumeSpecName: "util") pod "cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" (UID: "cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.338092 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-util\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.338153 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59tn5\" (UniqueName: \"kubernetes.io/projected/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-kube-api-access-59tn5\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.338180 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.886938 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" event={"ID":"cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2","Type":"ContainerDied","Data":"f5b8a233e1ce594eed9f01b61bb87984de770550fd919f2b0755ab5a0856ee66"} Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.886982 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5b8a233e1ce594eed9f01b61bb87984de770550fd919f2b0755ab5a0856ee66" Oct 07 19:28:16 crc kubenswrapper[4813]: I1007 19:28:16.887022 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.564087 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8"] Oct 07 19:28:18 crc kubenswrapper[4813]: E1007 19:28:18.564562 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerName="pull" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.564573 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerName="pull" Oct 07 19:28:18 crc kubenswrapper[4813]: E1007 19:28:18.564590 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerName="extract" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.564597 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerName="extract" Oct 07 19:28:18 crc kubenswrapper[4813]: E1007 19:28:18.564605 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerName="util" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.564611 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerName="util" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.564703 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2" containerName="extract" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.565048 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.569634 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.569685 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-rr2ww" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.572650 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.582120 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8"] Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.673734 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fmdr\" (UniqueName: \"kubernetes.io/projected/8350f82b-1b55-4571-83a0-14a18f238c51-kube-api-access-8fmdr\") pod \"nmstate-operator-858ddd8f98-vx5d8\" (UID: \"8350f82b-1b55-4571-83a0-14a18f238c51\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.775209 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fmdr\" (UniqueName: \"kubernetes.io/projected/8350f82b-1b55-4571-83a0-14a18f238c51-kube-api-access-8fmdr\") pod \"nmstate-operator-858ddd8f98-vx5d8\" (UID: \"8350f82b-1b55-4571-83a0-14a18f238c51\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.794987 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fmdr\" (UniqueName: \"kubernetes.io/projected/8350f82b-1b55-4571-83a0-14a18f238c51-kube-api-access-8fmdr\") pod \"nmstate-operator-858ddd8f98-vx5d8\" (UID: \"8350f82b-1b55-4571-83a0-14a18f238c51\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8" Oct 07 19:28:18 crc kubenswrapper[4813]: I1007 19:28:18.878086 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8" Oct 07 19:28:19 crc kubenswrapper[4813]: I1007 19:28:19.087701 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8"] Oct 07 19:28:19 crc kubenswrapper[4813]: W1007 19:28:19.096604 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8350f82b_1b55_4571_83a0_14a18f238c51.slice/crio-99a8706426d31e2de505e7eccd57cc3953bb75ca7c2cea2dc465f8ba990d111e WatchSource:0}: Error finding container 99a8706426d31e2de505e7eccd57cc3953bb75ca7c2cea2dc465f8ba990d111e: Status 404 returned error can't find the container with id 99a8706426d31e2de505e7eccd57cc3953bb75ca7c2cea2dc465f8ba990d111e Oct 07 19:28:19 crc kubenswrapper[4813]: I1007 19:28:19.910978 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8" event={"ID":"8350f82b-1b55-4571-83a0-14a18f238c51","Type":"ContainerStarted","Data":"99a8706426d31e2de505e7eccd57cc3953bb75ca7c2cea2dc465f8ba990d111e"} Oct 07 19:28:21 crc kubenswrapper[4813]: I1007 19:28:21.921357 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8" event={"ID":"8350f82b-1b55-4571-83a0-14a18f238c51","Type":"ContainerStarted","Data":"1a66700182f86796b6eaf43d8e6ffeb6b16750cb6eb86c57d57a6aac48983a4a"} Oct 07 19:28:21 crc kubenswrapper[4813]: I1007 19:28:21.943720 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-vx5d8" podStartSLOduration=1.54225215 podStartE2EDuration="3.94370407s" podCreationTimestamp="2025-10-07 19:28:18 +0000 UTC" firstStartedPulling="2025-10-07 19:28:19.102265598 +0000 UTC m=+625.180521209" lastFinishedPulling="2025-10-07 19:28:21.503717518 +0000 UTC m=+627.581973129" observedRunningTime="2025-10-07 19:28:21.943352819 +0000 UTC m=+628.021608430" watchObservedRunningTime="2025-10-07 19:28:21.94370407 +0000 UTC m=+628.021959681" Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.910335 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5"] Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.911714 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.925682 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-z5qpl" Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.944333 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5"] Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.957227 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4"] Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.957987 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.967839 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.989044 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4"] Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.996060 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-bw2mb"] Oct 07 19:28:22 crc kubenswrapper[4813]: I1007 19:28:22.996893 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.026011 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdm6z\" (UniqueName: \"kubernetes.io/projected/3525607e-5512-4d19-a0ce-42df574e763a-kube-api-access-vdm6z\") pod \"nmstate-metrics-fdff9cb8d-7mqw5\" (UID: \"3525607e-5512-4d19-a0ce-42df574e763a\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.115390 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf"] Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.116089 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.118595 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-wjqqc" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.118921 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.119071 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.127691 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdm6z\" (UniqueName: \"kubernetes.io/projected/3525607e-5512-4d19-a0ce-42df574e763a-kube-api-access-vdm6z\") pod \"nmstate-metrics-fdff9cb8d-7mqw5\" (UID: \"3525607e-5512-4d19-a0ce-42df574e763a\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.127736 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-nmstate-lock\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.127780 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d66b41fa-f25e-4dd5-8f30-f496940d7d19-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sr7d4\" (UID: \"d66b41fa-f25e-4dd5-8f30-f496940d7d19\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.127804 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzv99\" (UniqueName: 
\"kubernetes.io/projected/d66b41fa-f25e-4dd5-8f30-f496940d7d19-kube-api-access-qzv99\") pod \"nmstate-webhook-6cdbc54649-sr7d4\" (UID: \"d66b41fa-f25e-4dd5-8f30-f496940d7d19\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.127825 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-ovs-socket\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.127844 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-dbus-socket\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.127875 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmgk9\" (UniqueName: \"kubernetes.io/projected/4fe83b69-4076-411a-b34e-fd61c901eb03-kube-api-access-gmgk9\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.152412 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdm6z\" (UniqueName: \"kubernetes.io/projected/3525607e-5512-4d19-a0ce-42df574e763a-kube-api-access-vdm6z\") pod \"nmstate-metrics-fdff9cb8d-7mqw5\" (UID: \"3525607e-5512-4d19-a0ce-42df574e763a\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.159642 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf"] Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.225453 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229423 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmz4m\" (UniqueName: \"kubernetes.io/projected/56036d68-a088-4f16-8fce-0c11b7c9c4e3-kube-api-access-kmz4m\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229479 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmgk9\" (UniqueName: \"kubernetes.io/projected/4fe83b69-4076-411a-b34e-fd61c901eb03-kube-api-access-gmgk9\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229517 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/56036d68-a088-4f16-8fce-0c11b7c9c4e3-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229541 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-nmstate-lock\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229568 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d66b41fa-f25e-4dd5-8f30-f496940d7d19-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sr7d4\" (UID: \"d66b41fa-f25e-4dd5-8f30-f496940d7d19\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229590 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/56036d68-a088-4f16-8fce-0c11b7c9c4e3-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229606 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzv99\" (UniqueName: \"kubernetes.io/projected/d66b41fa-f25e-4dd5-8f30-f496940d7d19-kube-api-access-qzv99\") pod \"nmstate-webhook-6cdbc54649-sr7d4\" (UID: \"d66b41fa-f25e-4dd5-8f30-f496940d7d19\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229626 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-ovs-socket\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229644 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: 
\"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-dbus-socket\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.229890 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-dbus-socket\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.230183 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-nmstate-lock\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.233294 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d66b41fa-f25e-4dd5-8f30-f496940d7d19-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sr7d4\" (UID: \"d66b41fa-f25e-4dd5-8f30-f496940d7d19\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.233475 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/4fe83b69-4076-411a-b34e-fd61c901eb03-ovs-socket\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.255357 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzv99\" (UniqueName: \"kubernetes.io/projected/d66b41fa-f25e-4dd5-8f30-f496940d7d19-kube-api-access-qzv99\") pod \"nmstate-webhook-6cdbc54649-sr7d4\" (UID: \"d66b41fa-f25e-4dd5-8f30-f496940d7d19\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.256015 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmgk9\" (UniqueName: \"kubernetes.io/projected/4fe83b69-4076-411a-b34e-fd61c901eb03-kube-api-access-gmgk9\") pod \"nmstate-handler-bw2mb\" (UID: \"4fe83b69-4076-411a-b34e-fd61c901eb03\") " pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.281594 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.315237 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.338247 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-fb7cfdb67-7cvpf"] Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.339081 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.347034 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/56036d68-a088-4f16-8fce-0c11b7c9c4e3-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.347388 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmz4m\" (UniqueName: \"kubernetes.io/projected/56036d68-a088-4f16-8fce-0c11b7c9c4e3-kube-api-access-kmz4m\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.347749 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/56036d68-a088-4f16-8fce-0c11b7c9c4e3-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.348612 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/56036d68-a088-4f16-8fce-0c11b7c9c4e3-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.353185 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/56036d68-a088-4f16-8fce-0c11b7c9c4e3-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.363304 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-fb7cfdb67-7cvpf"] Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.394363 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmz4m\" (UniqueName: \"kubernetes.io/projected/56036d68-a088-4f16-8fce-0c11b7c9c4e3-kube-api-access-kmz4m\") pod \"nmstate-console-plugin-6b874cbd85-2fdzf\" (UID: \"56036d68-a088-4f16-8fce-0c11b7c9c4e3\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.428895 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.451282 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58f55\" (UniqueName: \"kubernetes.io/projected/d0ff5591-b912-4f85-bdcf-c268a3f1c483-kube-api-access-58f55\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.451405 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-oauth-config\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.451447 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-service-ca\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.451469 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-oauth-serving-cert\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.451508 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-config\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.451553 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-serving-cert\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.451569 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-trusted-ca-bundle\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.552585 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-serving-cert\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.552619 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-trusted-ca-bundle\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.552652 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58f55\" (UniqueName: \"kubernetes.io/projected/d0ff5591-b912-4f85-bdcf-c268a3f1c483-kube-api-access-58f55\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.552671 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-oauth-config\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.552692 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-service-ca\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.552715 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-oauth-serving-cert\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.552744 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-config\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.553716 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-trusted-ca-bundle\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.555335 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-config\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.555888 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-service-ca\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.555943 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/d0ff5591-b912-4f85-bdcf-c268a3f1c483-oauth-serving-cert\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.559690 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-serving-cert\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.562219 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/d0ff5591-b912-4f85-bdcf-c268a3f1c483-console-oauth-config\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.565083 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5"] Oct 07 19:28:23 crc kubenswrapper[4813]: W1007 19:28:23.583111 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3525607e_5512_4d19_a0ce_42df574e763a.slice/crio-874f44338889d6080400142c87d4ad12f184a6460725feb377a04e6b87b89f95 WatchSource:0}: Error finding container 874f44338889d6080400142c87d4ad12f184a6460725feb377a04e6b87b89f95: Status 404 returned error can't find the container with id 874f44338889d6080400142c87d4ad12f184a6460725feb377a04e6b87b89f95 Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.584236 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58f55\" (UniqueName: \"kubernetes.io/projected/d0ff5591-b912-4f85-bdcf-c268a3f1c483-kube-api-access-58f55\") pod \"console-fb7cfdb67-7cvpf\" (UID: \"d0ff5591-b912-4f85-bdcf-c268a3f1c483\") " pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.629744 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4"] Oct 07 19:28:23 crc kubenswrapper[4813]: W1007 19:28:23.635855 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd66b41fa_f25e_4dd5_8f30_f496940d7d19.slice/crio-c50c5a9d0e6c08dd0cdea74e63b79c23e6f24165d88a30c0861f90fb8192551c WatchSource:0}: Error finding container c50c5a9d0e6c08dd0cdea74e63b79c23e6f24165d88a30c0861f90fb8192551c: Status 404 returned error can't find the container with id c50c5a9d0e6c08dd0cdea74e63b79c23e6f24165d88a30c0861f90fb8192551c Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.687783 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.695036 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf"] Oct 07 19:28:23 crc kubenswrapper[4813]: W1007 19:28:23.710186 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56036d68_a088_4f16_8fce_0c11b7c9c4e3.slice/crio-4b4826599d1226bfdfceab09d12b6781f66e73b3f52286b72dada8e235e8b857 WatchSource:0}: Error finding container 4b4826599d1226bfdfceab09d12b6781f66e73b3f52286b72dada8e235e8b857: Status 404 returned error can't find the container with id 4b4826599d1226bfdfceab09d12b6781f66e73b3f52286b72dada8e235e8b857 Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.853116 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-fb7cfdb67-7cvpf"] Oct 07 19:28:23 crc kubenswrapper[4813]: W1007 19:28:23.858733 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0ff5591_b912_4f85_bdcf_c268a3f1c483.slice/crio-d00dc62307fdb1126ce4c449789874c297b2b374ddca48ab0ce2761bf00ecf9f WatchSource:0}: Error finding container d00dc62307fdb1126ce4c449789874c297b2b374ddca48ab0ce2761bf00ecf9f: Status 404 returned error can't find the container with id d00dc62307fdb1126ce4c449789874c297b2b374ddca48ab0ce2761bf00ecf9f Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.937900 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" event={"ID":"d66b41fa-f25e-4dd5-8f30-f496940d7d19","Type":"ContainerStarted","Data":"c50c5a9d0e6c08dd0cdea74e63b79c23e6f24165d88a30c0861f90fb8192551c"} Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.939862 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" event={"ID":"56036d68-a088-4f16-8fce-0c11b7c9c4e3","Type":"ContainerStarted","Data":"4b4826599d1226bfdfceab09d12b6781f66e73b3f52286b72dada8e235e8b857"} Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.941587 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" event={"ID":"3525607e-5512-4d19-a0ce-42df574e763a","Type":"ContainerStarted","Data":"874f44338889d6080400142c87d4ad12f184a6460725feb377a04e6b87b89f95"} Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.943799 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-fb7cfdb67-7cvpf" event={"ID":"d0ff5591-b912-4f85-bdcf-c268a3f1c483","Type":"ContainerStarted","Data":"d00dc62307fdb1126ce4c449789874c297b2b374ddca48ab0ce2761bf00ecf9f"} Oct 07 19:28:23 crc kubenswrapper[4813]: I1007 19:28:23.944924 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-bw2mb" event={"ID":"4fe83b69-4076-411a-b34e-fd61c901eb03","Type":"ContainerStarted","Data":"2d9f8806ba1e3fa39e1ce4ed3cdf8fe9df91526c61c1adee498f083f1fe82a5a"} Oct 07 19:28:24 crc kubenswrapper[4813]: I1007 19:28:24.951032 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-fb7cfdb67-7cvpf" event={"ID":"d0ff5591-b912-4f85-bdcf-c268a3f1c483","Type":"ContainerStarted","Data":"36f662823cd687dd39d6db067288f6480b9732685ea7d2c4bf639fe947ba67ff"} Oct 07 19:28:24 crc kubenswrapper[4813]: I1007 19:28:24.968579 4813 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-console/console-fb7cfdb67-7cvpf" podStartSLOduration=1.968552884 podStartE2EDuration="1.968552884s" podCreationTimestamp="2025-10-07 19:28:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:28:24.96575865 +0000 UTC m=+631.044014371" watchObservedRunningTime="2025-10-07 19:28:24.968552884 +0000 UTC m=+631.046808505" Oct 07 19:28:26 crc kubenswrapper[4813]: I1007 19:28:26.967603 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-bw2mb" event={"ID":"4fe83b69-4076-411a-b34e-fd61c901eb03","Type":"ContainerStarted","Data":"2b84ba9891e2e057bdc5f5ba29ec1adf83a83bd42ab4ba4595f8beba9a103523"} Oct 07 19:28:26 crc kubenswrapper[4813]: I1007 19:28:26.967992 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:26 crc kubenswrapper[4813]: I1007 19:28:26.970360 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" event={"ID":"d66b41fa-f25e-4dd5-8f30-f496940d7d19","Type":"ContainerStarted","Data":"79532843baa23206f098a8fa115e10aa0e3aee49c027835489c6ac0f38b70bcf"} Oct 07 19:28:26 crc kubenswrapper[4813]: I1007 19:28:26.970904 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:26 crc kubenswrapper[4813]: I1007 19:28:26.974351 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" event={"ID":"56036d68-a088-4f16-8fce-0c11b7c9c4e3","Type":"ContainerStarted","Data":"68be95db1868c8615537d1965a1ab483017679c0eac3c86492f3a21e11a39550"} Oct 07 19:28:26 crc kubenswrapper[4813]: I1007 19:28:26.976122 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" event={"ID":"3525607e-5512-4d19-a0ce-42df574e763a","Type":"ContainerStarted","Data":"2da8812da6c583c5c30818dadb810b806e8dcbae4e0a8e6a3f79ea64d7aca053"} Oct 07 19:28:26 crc kubenswrapper[4813]: I1007 19:28:26.994587 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-bw2mb" podStartSLOduration=1.9327896230000001 podStartE2EDuration="4.994569577s" podCreationTimestamp="2025-10-07 19:28:22 +0000 UTC" firstStartedPulling="2025-10-07 19:28:23.388016727 +0000 UTC m=+629.466272338" lastFinishedPulling="2025-10-07 19:28:26.449796681 +0000 UTC m=+632.528052292" observedRunningTime="2025-10-07 19:28:26.985789764 +0000 UTC m=+633.064045405" watchObservedRunningTime="2025-10-07 19:28:26.994569577 +0000 UTC m=+633.072825198" Oct 07 19:28:27 crc kubenswrapper[4813]: I1007 19:28:27.011816 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-2fdzf" podStartSLOduration=1.292196758 podStartE2EDuration="4.011790714s" podCreationTimestamp="2025-10-07 19:28:23 +0000 UTC" firstStartedPulling="2025-10-07 19:28:23.712805873 +0000 UTC m=+629.791061484" lastFinishedPulling="2025-10-07 19:28:26.432399829 +0000 UTC m=+632.510655440" observedRunningTime="2025-10-07 19:28:27.000796434 +0000 UTC m=+633.079052075" watchObservedRunningTime="2025-10-07 19:28:27.011790714 +0000 UTC m=+633.090046315" Oct 07 19:28:29 crc kubenswrapper[4813]: I1007 19:28:29.994659 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" event={"ID":"3525607e-5512-4d19-a0ce-42df574e763a","Type":"ContainerStarted","Data":"a4376e01d5d5770db19276904731a9b4ffc503b8e02d42f5aa4536a7b648ad77"} Oct 07 19:28:30 crc kubenswrapper[4813]: I1007 19:28:30.025268 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-7mqw5" podStartSLOduration=2.50885091 podStartE2EDuration="8.025236118s" podCreationTimestamp="2025-10-07 19:28:22 +0000 UTC" firstStartedPulling="2025-10-07 19:28:23.589601546 +0000 UTC m=+629.667857157" lastFinishedPulling="2025-10-07 19:28:29.105986744 +0000 UTC m=+635.184242365" observedRunningTime="2025-10-07 19:28:30.01664142 +0000 UTC m=+636.094897101" watchObservedRunningTime="2025-10-07 19:28:30.025236118 +0000 UTC m=+636.103491769" Oct 07 19:28:30 crc kubenswrapper[4813]: I1007 19:28:30.029349 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" podStartSLOduration=5.235009312 podStartE2EDuration="8.02930638s" podCreationTimestamp="2025-10-07 19:28:22 +0000 UTC" firstStartedPulling="2025-10-07 19:28:23.638114482 +0000 UTC m=+629.716370093" lastFinishedPulling="2025-10-07 19:28:26.43241155 +0000 UTC m=+632.510667161" observedRunningTime="2025-10-07 19:28:27.034065962 +0000 UTC m=+633.112321623" watchObservedRunningTime="2025-10-07 19:28:30.02930638 +0000 UTC m=+636.107562031" Oct 07 19:28:33 crc kubenswrapper[4813]: I1007 19:28:33.343782 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-bw2mb" Oct 07 19:28:33 crc kubenswrapper[4813]: I1007 19:28:33.688579 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:33 crc kubenswrapper[4813]: I1007 19:28:33.688667 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:33 crc kubenswrapper[4813]: I1007 19:28:33.699908 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:34 crc kubenswrapper[4813]: I1007 19:28:34.029482 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-fb7cfdb67-7cvpf" Oct 07 19:28:34 crc kubenswrapper[4813]: I1007 19:28:34.101200 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-ddzzh"] Oct 07 19:28:43 crc kubenswrapper[4813]: I1007 19:28:43.289257 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sr7d4" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.212675 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6"] Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.215042 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.224364 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.225607 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6"] Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.313012 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prxn9\" (UniqueName: \"kubernetes.io/projected/2d920201-8633-4cea-9d52-95f13d4e80ec-kube-api-access-prxn9\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.313077 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.313157 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.414980 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.415184 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.415799 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prxn9\" (UniqueName: \"kubernetes.io/projected/2d920201-8633-4cea-9d52-95f13d4e80ec-kube-api-access-prxn9\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.416407 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.416537 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.438837 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prxn9\" (UniqueName: \"kubernetes.io/projected/2d920201-8633-4cea-9d52-95f13d4e80ec-kube-api-access-prxn9\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.534662 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:28:58 crc kubenswrapper[4813]: I1007 19:28:58.753834 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6"] Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.170357 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-ddzzh" podUID="ba658322-d68e-4312-8283-4da69865e460" containerName="console" containerID="cri-o://6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d" gracePeriod=15 Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.190979 4813 generic.go:334] "Generic (PLEG): container finished" podID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerID="a469a09b511767bafc9e3dab841309be248a07fde607d792f15701157f589f81" exitCode=0 Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.191038 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" event={"ID":"2d920201-8633-4cea-9d52-95f13d4e80ec","Type":"ContainerDied","Data":"a469a09b511767bafc9e3dab841309be248a07fde607d792f15701157f589f81"} Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.193480 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" event={"ID":"2d920201-8633-4cea-9d52-95f13d4e80ec","Type":"ContainerStarted","Data":"f730f2181e5210d19dd8b86619a9fe41297574ecf0ad9a90503bd1dd1e8f0e4c"} Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.522239 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-ddzzh_ba658322-d68e-4312-8283-4da69865e460/console/0.log" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.522359 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.644116 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-service-ca" (OuterVolumeSpecName: "service-ca") pod "ba658322-d68e-4312-8283-4da69865e460" (UID: "ba658322-d68e-4312-8283-4da69865e460"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.643076 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-service-ca\") pod \"ba658322-d68e-4312-8283-4da69865e460\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.644297 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-serving-cert\") pod \"ba658322-d68e-4312-8283-4da69865e460\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.644424 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmltl\" (UniqueName: \"kubernetes.io/projected/ba658322-d68e-4312-8283-4da69865e460-kube-api-access-qmltl\") pod \"ba658322-d68e-4312-8283-4da69865e460\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.644467 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-oauth-serving-cert\") pod \"ba658322-d68e-4312-8283-4da69865e460\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.644519 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-trusted-ca-bundle\") pod \"ba658322-d68e-4312-8283-4da69865e460\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.644569 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-oauth-config\") pod \"ba658322-d68e-4312-8283-4da69865e460\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.644603 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-console-config\") pod \"ba658322-d68e-4312-8283-4da69865e460\" (UID: \"ba658322-d68e-4312-8283-4da69865e460\") " Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.645150 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ba658322-d68e-4312-8283-4da69865e460" (UID: "ba658322-d68e-4312-8283-4da69865e460"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.645198 4813 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-service-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.645194 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ba658322-d68e-4312-8283-4da69865e460" (UID: "ba658322-d68e-4312-8283-4da69865e460"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.645302 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-console-config" (OuterVolumeSpecName: "console-config") pod "ba658322-d68e-4312-8283-4da69865e460" (UID: "ba658322-d68e-4312-8283-4da69865e460"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.652852 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ba658322-d68e-4312-8283-4da69865e460" (UID: "ba658322-d68e-4312-8283-4da69865e460"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.655947 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba658322-d68e-4312-8283-4da69865e460-kube-api-access-qmltl" (OuterVolumeSpecName: "kube-api-access-qmltl") pod "ba658322-d68e-4312-8283-4da69865e460" (UID: "ba658322-d68e-4312-8283-4da69865e460"). InnerVolumeSpecName "kube-api-access-qmltl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.660753 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ba658322-d68e-4312-8283-4da69865e460" (UID: "ba658322-d68e-4312-8283-4da69865e460"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.746831 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmltl\" (UniqueName: \"kubernetes.io/projected/ba658322-d68e-4312-8283-4da69865e460-kube-api-access-qmltl\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.746873 4813 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.746886 4813 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.746898 4813 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba658322-d68e-4312-8283-4da69865e460-console-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.746909 4813 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:28:59 crc kubenswrapper[4813]: I1007 19:28:59.746920 4813 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba658322-d68e-4312-8283-4da69865e460-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.197502 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-ddzzh_ba658322-d68e-4312-8283-4da69865e460/console/0.log" Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.197774 4813 generic.go:334] "Generic (PLEG): container finished" podID="ba658322-d68e-4312-8283-4da69865e460" containerID="6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d" exitCode=2 Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.197806 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-ddzzh" event={"ID":"ba658322-d68e-4312-8283-4da69865e460","Type":"ContainerDied","Data":"6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d"} Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.197840 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-ddzzh" event={"ID":"ba658322-d68e-4312-8283-4da69865e460","Type":"ContainerDied","Data":"d139b91ad6ad2502f54ff8874ed5d7bfc9d31e91639762e5fcbf6c4da5b9de0d"} Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.197858 4813 scope.go:117] "RemoveContainer" containerID="6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d" Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.197868 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-ddzzh" Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.215107 4813 scope.go:117] "RemoveContainer" containerID="6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d" Oct 07 19:29:00 crc kubenswrapper[4813]: E1007 19:29:00.215684 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d\": container with ID starting with 6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d not found: ID does not exist" containerID="6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d" Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.215727 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d"} err="failed to get container status \"6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d\": rpc error: code = NotFound desc = could not find container \"6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d\": container with ID starting with 6013e8e7ffd54e2ccbb62bb7b09816ad6a084155b21550a0d1966e359cdf8a9d not found: ID does not exist" Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.228201 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-ddzzh"] Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.231957 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-ddzzh"] Oct 07 19:29:00 crc kubenswrapper[4813]: I1007 19:29:00.616003 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba658322-d68e-4312-8283-4da69865e460" path="/var/lib/kubelet/pods/ba658322-d68e-4312-8283-4da69865e460/volumes" Oct 07 19:29:01 crc kubenswrapper[4813]: I1007 19:29:01.211717 4813 generic.go:334] "Generic (PLEG): container finished" podID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerID="0f29b3f4ac19f37746e589a22600ab3dce5423d321b346b8a209eec545c7e9a7" exitCode=0 Oct 07 19:29:01 crc kubenswrapper[4813]: I1007 19:29:01.211795 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" event={"ID":"2d920201-8633-4cea-9d52-95f13d4e80ec","Type":"ContainerDied","Data":"0f29b3f4ac19f37746e589a22600ab3dce5423d321b346b8a209eec545c7e9a7"} Oct 07 19:29:02 crc kubenswrapper[4813]: I1007 19:29:02.221734 4813 generic.go:334] "Generic (PLEG): container finished" podID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerID="a8fefec54d13e85028eaa5c90b1866b810ce2fc8c7ea3f7c4ab3b06d38468648" exitCode=0 Oct 07 19:29:02 crc kubenswrapper[4813]: I1007 19:29:02.221851 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" event={"ID":"2d920201-8633-4cea-9d52-95f13d4e80ec","Type":"ContainerDied","Data":"a8fefec54d13e85028eaa5c90b1866b810ce2fc8c7ea3f7c4ab3b06d38468648"} Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.508293 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.699121 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prxn9\" (UniqueName: \"kubernetes.io/projected/2d920201-8633-4cea-9d52-95f13d4e80ec-kube-api-access-prxn9\") pod \"2d920201-8633-4cea-9d52-95f13d4e80ec\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.699217 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-bundle\") pod \"2d920201-8633-4cea-9d52-95f13d4e80ec\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.699248 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-util\") pod \"2d920201-8633-4cea-9d52-95f13d4e80ec\" (UID: \"2d920201-8633-4cea-9d52-95f13d4e80ec\") " Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.701270 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-bundle" (OuterVolumeSpecName: "bundle") pod "2d920201-8633-4cea-9d52-95f13d4e80ec" (UID: "2d920201-8633-4cea-9d52-95f13d4e80ec"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.711546 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d920201-8633-4cea-9d52-95f13d4e80ec-kube-api-access-prxn9" (OuterVolumeSpecName: "kube-api-access-prxn9") pod "2d920201-8633-4cea-9d52-95f13d4e80ec" (UID: "2d920201-8633-4cea-9d52-95f13d4e80ec"). InnerVolumeSpecName "kube-api-access-prxn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.716625 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-util" (OuterVolumeSpecName: "util") pod "2d920201-8633-4cea-9d52-95f13d4e80ec" (UID: "2d920201-8633-4cea-9d52-95f13d4e80ec"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.800359 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prxn9\" (UniqueName: \"kubernetes.io/projected/2d920201-8633-4cea-9d52-95f13d4e80ec-kube-api-access-prxn9\") on node \"crc\" DevicePath \"\"" Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.800394 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:29:03 crc kubenswrapper[4813]: I1007 19:29:03.800405 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2d920201-8633-4cea-9d52-95f13d4e80ec-util\") on node \"crc\" DevicePath \"\"" Oct 07 19:29:04 crc kubenswrapper[4813]: I1007 19:29:04.256021 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" event={"ID":"2d920201-8633-4cea-9d52-95f13d4e80ec","Type":"ContainerDied","Data":"f730f2181e5210d19dd8b86619a9fe41297574ecf0ad9a90503bd1dd1e8f0e4c"} Oct 07 19:29:04 crc kubenswrapper[4813]: I1007 19:29:04.256083 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f730f2181e5210d19dd8b86619a9fe41297574ecf0ad9a90503bd1dd1e8f0e4c" Oct 07 19:29:04 crc kubenswrapper[4813]: I1007 19:29:04.256459 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.176789 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-66cfc88647-54n75"] Oct 07 19:29:12 crc kubenswrapper[4813]: E1007 19:29:12.177643 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerName="pull" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.177654 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerName="pull" Oct 07 19:29:12 crc kubenswrapper[4813]: E1007 19:29:12.177669 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerName="util" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.177683 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerName="util" Oct 07 19:29:12 crc kubenswrapper[4813]: E1007 19:29:12.177695 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba658322-d68e-4312-8283-4da69865e460" containerName="console" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.177701 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba658322-d68e-4312-8283-4da69865e460" containerName="console" Oct 07 19:29:12 crc kubenswrapper[4813]: E1007 19:29:12.177709 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerName="extract" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.177716 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerName="extract" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.177833 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d920201-8633-4cea-9d52-95f13d4e80ec" containerName="extract" Oct 
07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.177844 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba658322-d68e-4312-8283-4da69865e460" containerName="console" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.178182 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.181039 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-q2n8p" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.181424 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.181565 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.183857 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.190711 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.198697 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-66cfc88647-54n75"] Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.202568 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/602ff599-0e30-47a2-a316-75053689d031-apiservice-cert\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.202607 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/602ff599-0e30-47a2-a316-75053689d031-webhook-cert\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.202631 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxw6r\" (UniqueName: \"kubernetes.io/projected/602ff599-0e30-47a2-a316-75053689d031-kube-api-access-mxw6r\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.303470 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/602ff599-0e30-47a2-a316-75053689d031-apiservice-cert\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.303520 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/602ff599-0e30-47a2-a316-75053689d031-webhook-cert\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.303548 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxw6r\" (UniqueName: \"kubernetes.io/projected/602ff599-0e30-47a2-a316-75053689d031-kube-api-access-mxw6r\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.322344 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/602ff599-0e30-47a2-a316-75053689d031-apiservice-cert\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.323808 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/602ff599-0e30-47a2-a316-75053689d031-webhook-cert\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.326943 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxw6r\" (UniqueName: \"kubernetes.io/projected/602ff599-0e30-47a2-a316-75053689d031-kube-api-access-mxw6r\") pod \"metallb-operator-controller-manager-66cfc88647-54n75\" (UID: \"602ff599-0e30-47a2-a316-75053689d031\") " pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.333583 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm"] Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.334227 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.338683 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.339907 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.341033 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-z6qmz" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.360820 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm"] Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.404439 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-webhook-cert\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.404730 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxgts\" (UniqueName: \"kubernetes.io/projected/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-kube-api-access-sxgts\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.404819 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-apiservice-cert\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.492178 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.506123 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxgts\" (UniqueName: \"kubernetes.io/projected/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-kube-api-access-sxgts\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.506180 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-apiservice-cert\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.506444 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-webhook-cert\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.512986 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-apiservice-cert\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.513155 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-webhook-cert\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.527104 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxgts\" (UniqueName: \"kubernetes.io/projected/dc1f8862-6e00-4d65-a90f-f0db7d23cf42-kube-api-access-sxgts\") pod \"metallb-operator-webhook-server-d65cbb559-4qksm\" (UID: \"dc1f8862-6e00-4d65-a90f-f0db7d23cf42\") " pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.657347 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:12 crc kubenswrapper[4813]: I1007 19:29:12.810880 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-66cfc88647-54n75"] Oct 07 19:29:13 crc kubenswrapper[4813]: I1007 19:29:13.126407 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm"] Oct 07 19:29:13 crc kubenswrapper[4813]: W1007 19:29:13.130249 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc1f8862_6e00_4d65_a90f_f0db7d23cf42.slice/crio-5dc82992c7142b62793d90f841dd37cdc6bf97885dfd9c8f983df9ec8e0c9193 WatchSource:0}: Error finding container 5dc82992c7142b62793d90f841dd37cdc6bf97885dfd9c8f983df9ec8e0c9193: Status 404 returned error can't find the container with id 5dc82992c7142b62793d90f841dd37cdc6bf97885dfd9c8f983df9ec8e0c9193 Oct 07 19:29:13 crc kubenswrapper[4813]: I1007 19:29:13.296357 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" event={"ID":"dc1f8862-6e00-4d65-a90f-f0db7d23cf42","Type":"ContainerStarted","Data":"5dc82992c7142b62793d90f841dd37cdc6bf97885dfd9c8f983df9ec8e0c9193"} Oct 07 19:29:13 crc kubenswrapper[4813]: I1007 19:29:13.297081 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" event={"ID":"602ff599-0e30-47a2-a316-75053689d031","Type":"ContainerStarted","Data":"5b27fa665a95ee4a4e7e7aed88463775611fb475e6505583fc695ef2c3dc4611"} Oct 07 19:29:18 crc kubenswrapper[4813]: I1007 19:29:18.345027 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" event={"ID":"602ff599-0e30-47a2-a316-75053689d031","Type":"ContainerStarted","Data":"3807023fe273e891b73e13331accaef0bdaa356c70c1fbb741e170d4d184298e"} Oct 07 19:29:18 crc kubenswrapper[4813]: I1007 19:29:18.345748 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:18 crc kubenswrapper[4813]: I1007 19:29:18.350171 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" event={"ID":"dc1f8862-6e00-4d65-a90f-f0db7d23cf42","Type":"ContainerStarted","Data":"6965302d18f77291dfa774d2432b54cd637617c27275b9f072b3486f1ee9f546"} Oct 07 19:29:18 crc kubenswrapper[4813]: I1007 19:29:18.350274 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:18 crc kubenswrapper[4813]: I1007 19:29:18.368808 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" podStartSLOduration=1.273592754 podStartE2EDuration="6.368781203s" podCreationTimestamp="2025-10-07 19:29:12 +0000 UTC" firstStartedPulling="2025-10-07 19:29:12.853548235 +0000 UTC m=+678.931803846" lastFinishedPulling="2025-10-07 19:29:17.948736684 +0000 UTC m=+684.026992295" observedRunningTime="2025-10-07 19:29:18.362768894 +0000 UTC m=+684.441024515" watchObservedRunningTime="2025-10-07 19:29:18.368781203 +0000 UTC m=+684.447036854" Oct 07 19:29:18 crc kubenswrapper[4813]: I1007 19:29:18.387383 4813 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" podStartSLOduration=1.5503678189999999 podStartE2EDuration="6.387362568s" podCreationTimestamp="2025-10-07 19:29:12 +0000 UTC" firstStartedPulling="2025-10-07 19:29:13.13235229 +0000 UTC m=+679.210607901" lastFinishedPulling="2025-10-07 19:29:17.969347039 +0000 UTC m=+684.047602650" observedRunningTime="2025-10-07 19:29:18.385860253 +0000 UTC m=+684.464115864" watchObservedRunningTime="2025-10-07 19:29:18.387362568 +0000 UTC m=+684.465618219" Oct 07 19:29:32 crc kubenswrapper[4813]: I1007 19:29:32.668920 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-d65cbb559-4qksm" Oct 07 19:29:52 crc kubenswrapper[4813]: I1007 19:29:52.079082 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:29:52 crc kubenswrapper[4813]: I1007 19:29:52.080538 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:29:52 crc kubenswrapper[4813]: I1007 19:29:52.496176 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-66cfc88647-54n75" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.229364 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-64plq"] Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.232039 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.237669 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.237920 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.238276 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-vkp6h" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.241163 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg"] Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.241872 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.243753 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.264091 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg"] Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342273 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-startup\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342350 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xv4l\" (UniqueName: \"kubernetes.io/projected/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-kube-api-access-6xv4l\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342375 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-conf\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342418 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics-certs\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342448 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zs8h\" (UniqueName: \"kubernetes.io/projected/d5c7c957-5714-4478-874f-1fe2cc7809af-kube-api-access-6zs8h\") pod \"frr-k8s-webhook-server-64bf5d555-m79zg\" (UID: \"d5c7c957-5714-4478-874f-1fe2cc7809af\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342583 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-reloader\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342629 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342669 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d5c7c957-5714-4478-874f-1fe2cc7809af-cert\") pod \"frr-k8s-webhook-server-64bf5d555-m79zg\" (UID: \"d5c7c957-5714-4478-874f-1fe2cc7809af\") " 
pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.342716 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-sockets\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.345575 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-5qpw7"] Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.346652 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.349518 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-xnk8z" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.349746 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.350523 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.353248 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-w6bx6"] Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.354011 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.363852 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.364201 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.377048 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-w6bx6"] Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.443836 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/184d7c42-4069-4dbe-a8e7-613da65cfb62-metallb-excludel2\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.443880 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-reloader\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.443898 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.443919 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d5c7c957-5714-4478-874f-1fe2cc7809af-cert\") pod \"frr-k8s-webhook-server-64bf5d555-m79zg\" (UID: 
\"d5c7c957-5714-4478-874f-1fe2cc7809af\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.443966 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z5mj\" (UniqueName: \"kubernetes.io/projected/184d7c42-4069-4dbe-a8e7-613da65cfb62-kube-api-access-4z5mj\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.443985 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-sockets\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444014 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-metrics-certs\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444033 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444051 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-startup\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444067 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xv4l\" (UniqueName: \"kubernetes.io/projected/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-kube-api-access-6xv4l\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444082 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-conf\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444103 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics-certs\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444132 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zs8h\" (UniqueName: \"kubernetes.io/projected/d5c7c957-5714-4478-874f-1fe2cc7809af-kube-api-access-6zs8h\") pod \"frr-k8s-webhook-server-64bf5d555-m79zg\" (UID: \"d5c7c957-5714-4478-874f-1fe2cc7809af\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444686 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-reloader\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.444861 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.445462 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-sockets\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.445676 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-conf\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: E1007 19:29:53.445727 4813 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Oct 07 19:29:53 crc kubenswrapper[4813]: E1007 19:29:53.445816 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics-certs podName:969bd45f-c575-4c44-a7f8-b8fc0fd89a05 nodeName:}" failed. No retries permitted until 2025-10-07 19:29:53.945796502 +0000 UTC m=+720.024052113 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics-certs") pod "frr-k8s-64plq" (UID: "969bd45f-c575-4c44-a7f8-b8fc0fd89a05") : secret "frr-k8s-certs-secret" not found Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.446630 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-frr-startup\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.450821 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d5c7c957-5714-4478-874f-1fe2cc7809af-cert\") pod \"frr-k8s-webhook-server-64bf5d555-m79zg\" (UID: \"d5c7c957-5714-4478-874f-1fe2cc7809af\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.464822 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xv4l\" (UniqueName: \"kubernetes.io/projected/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-kube-api-access-6xv4l\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.470843 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zs8h\" (UniqueName: \"kubernetes.io/projected/d5c7c957-5714-4478-874f-1fe2cc7809af-kube-api-access-6zs8h\") pod \"frr-k8s-webhook-server-64bf5d555-m79zg\" (UID: \"d5c7c957-5714-4478-874f-1fe2cc7809af\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.544696 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-metrics-certs\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.544741 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.544786 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-metrics-certs\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.544814 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-cert\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.544841 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/184d7c42-4069-4dbe-a8e7-613da65cfb62-metallb-excludel2\") pod 
\"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.544869 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z5mj\" (UniqueName: \"kubernetes.io/projected/184d7c42-4069-4dbe-a8e7-613da65cfb62-kube-api-access-4z5mj\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.544885 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k9nn\" (UniqueName: \"kubernetes.io/projected/ece86cbe-2002-4e30-bedb-56f9631f5726-kube-api-access-5k9nn\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: E1007 19:29:53.545620 4813 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 07 19:29:53 crc kubenswrapper[4813]: E1007 19:29:53.545692 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist podName:184d7c42-4069-4dbe-a8e7-613da65cfb62 nodeName:}" failed. No retries permitted until 2025-10-07 19:29:54.045671956 +0000 UTC m=+720.123927617 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist") pod "speaker-5qpw7" (UID: "184d7c42-4069-4dbe-a8e7-613da65cfb62") : secret "metallb-memberlist" not found Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.546091 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/184d7c42-4069-4dbe-a8e7-613da65cfb62-metallb-excludel2\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.547924 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-metrics-certs\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.560283 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.562793 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z5mj\" (UniqueName: \"kubernetes.io/projected/184d7c42-4069-4dbe-a8e7-613da65cfb62-kube-api-access-4z5mj\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.646046 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-metrics-certs\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.646111 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-cert\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.646160 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k9nn\" (UniqueName: \"kubernetes.io/projected/ece86cbe-2002-4e30-bedb-56f9631f5726-kube-api-access-5k9nn\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: E1007 19:29:53.646267 4813 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Oct 07 19:29:53 crc kubenswrapper[4813]: E1007 19:29:53.646478 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-metrics-certs podName:ece86cbe-2002-4e30-bedb-56f9631f5726 nodeName:}" failed. No retries permitted until 2025-10-07 19:29:54.146452477 +0000 UTC m=+720.224708088 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-metrics-certs") pod "controller-68d546b9d8-w6bx6" (UID: "ece86cbe-2002-4e30-bedb-56f9631f5726") : secret "controller-certs-secret" not found Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.664657 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-cert\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.669530 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k9nn\" (UniqueName: \"kubernetes.io/projected/ece86cbe-2002-4e30-bedb-56f9631f5726-kube-api-access-5k9nn\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.948651 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics-certs\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.952061 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/969bd45f-c575-4c44-a7f8-b8fc0fd89a05-metrics-certs\") pod \"frr-k8s-64plq\" (UID: \"969bd45f-c575-4c44-a7f8-b8fc0fd89a05\") " pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:53 crc kubenswrapper[4813]: I1007 19:29:53.988232 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg"] Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.049862 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:54 crc kubenswrapper[4813]: E1007 19:29:54.050004 4813 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 07 19:29:54 crc kubenswrapper[4813]: E1007 19:29:54.050092 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist podName:184d7c42-4069-4dbe-a8e7-613da65cfb62 nodeName:}" failed. No retries permitted until 2025-10-07 19:29:55.050044477 +0000 UTC m=+721.128300088 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist") pod "speaker-5qpw7" (UID: "184d7c42-4069-4dbe-a8e7-613da65cfb62") : secret "metallb-memberlist" not found Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.151338 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-metrics-certs\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.154668 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-64plq" Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.156811 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ece86cbe-2002-4e30-bedb-56f9631f5726-metrics-certs\") pod \"controller-68d546b9d8-w6bx6\" (UID: \"ece86cbe-2002-4e30-bedb-56f9631f5726\") " pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.267688 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.521135 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-w6bx6"] Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.573915 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" event={"ID":"d5c7c957-5714-4478-874f-1fe2cc7809af","Type":"ContainerStarted","Data":"1cc2caf4fda1a3d1e914835b3b344a33c5f9eccbc4b5ffcfa96af0b51bb8e7e8"} Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.575381 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerStarted","Data":"5e25bea876c7c6af9dfe97d9d7f94187f6fd585e8483f61cdf76e66272940cad"} Oct 07 19:29:54 crc kubenswrapper[4813]: I1007 19:29:54.576306 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-w6bx6" event={"ID":"ece86cbe-2002-4e30-bedb-56f9631f5726","Type":"ContainerStarted","Data":"5f0838805fafd2738833fff9151282987778a23eabd1cdf1a57bb3580ea38607"} Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.063505 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.069231 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/184d7c42-4069-4dbe-a8e7-613da65cfb62-memberlist\") pod \"speaker-5qpw7\" (UID: \"184d7c42-4069-4dbe-a8e7-613da65cfb62\") " pod="metallb-system/speaker-5qpw7" Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.164845 4813 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-xnk8z" Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.172133 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-5qpw7" Oct 07 19:29:55 crc kubenswrapper[4813]: W1007 19:29:55.208644 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod184d7c42_4069_4dbe_a8e7_613da65cfb62.slice/crio-a28a3650bb10aa6b40190a124d4e7fb3ce830261ebadfb0d9da3163ec563dca1 WatchSource:0}: Error finding container a28a3650bb10aa6b40190a124d4e7fb3ce830261ebadfb0d9da3163ec563dca1: Status 404 returned error can't find the container with id a28a3650bb10aa6b40190a124d4e7fb3ce830261ebadfb0d9da3163ec563dca1 Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.589042 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-5qpw7" event={"ID":"184d7c42-4069-4dbe-a8e7-613da65cfb62","Type":"ContainerStarted","Data":"8e8eccfd6d1da83d14e270e4c75538ee6e395888e584aaff527c5803e1767d22"} Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.589407 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-5qpw7" event={"ID":"184d7c42-4069-4dbe-a8e7-613da65cfb62","Type":"ContainerStarted","Data":"a28a3650bb10aa6b40190a124d4e7fb3ce830261ebadfb0d9da3163ec563dca1"} Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.595168 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-w6bx6" event={"ID":"ece86cbe-2002-4e30-bedb-56f9631f5726","Type":"ContainerStarted","Data":"877f42ae3ea9aaa313f3ae1a4a0053d3eee3c0628510e2606a79d63f0b0cc4b0"} Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.595216 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-w6bx6" event={"ID":"ece86cbe-2002-4e30-bedb-56f9631f5726","Type":"ContainerStarted","Data":"d4388a61d3d280843bcb19583c62c4aaed2f3ffeadd325d9562902b431bf2fb7"} Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.595374 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:29:55 crc kubenswrapper[4813]: I1007 19:29:55.616454 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-w6bx6" podStartSLOduration=2.6164403849999998 podStartE2EDuration="2.616440385s" podCreationTimestamp="2025-10-07 19:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:29:55.613696093 +0000 UTC m=+721.691951704" watchObservedRunningTime="2025-10-07 19:29:55.616440385 +0000 UTC m=+721.694695996" Oct 07 19:29:56 crc kubenswrapper[4813]: I1007 19:29:56.629232 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-5qpw7" event={"ID":"184d7c42-4069-4dbe-a8e7-613da65cfb62","Type":"ContainerStarted","Data":"3b669c6b597d0a69984ad77af9772bfe01ef951bdde666e778cd3467cfcd0e52"} Oct 07 19:29:56 crc kubenswrapper[4813]: I1007 19:29:56.648450 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-5qpw7" podStartSLOduration=3.648429502 podStartE2EDuration="3.648429502s" podCreationTimestamp="2025-10-07 19:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:29:56.645689441 +0000 UTC m=+722.723945052" watchObservedRunningTime="2025-10-07 19:29:56.648429502 +0000 UTC m=+722.726685113" Oct 07 19:29:57 crc kubenswrapper[4813]: I1007 19:29:57.636997 4813 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-5qpw7" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.130430 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc"] Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.131451 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.133086 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.133805 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc"] Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.135156 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.143074 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2217c78c-2018-4f7d-99eb-158ca2077830-config-volume\") pod \"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.143122 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdnrh\" (UniqueName: \"kubernetes.io/projected/2217c78c-2018-4f7d-99eb-158ca2077830-kube-api-access-zdnrh\") pod \"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.143213 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2217c78c-2018-4f7d-99eb-158ca2077830-secret-volume\") pod \"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.249106 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdnrh\" (UniqueName: \"kubernetes.io/projected/2217c78c-2018-4f7d-99eb-158ca2077830-kube-api-access-zdnrh\") pod \"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.249539 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2217c78c-2018-4f7d-99eb-158ca2077830-secret-volume\") pod \"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.249763 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2217c78c-2018-4f7d-99eb-158ca2077830-config-volume\") pod 
\"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.250739 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2217c78c-2018-4f7d-99eb-158ca2077830-config-volume\") pod \"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.257153 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2217c78c-2018-4f7d-99eb-158ca2077830-secret-volume\") pod \"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.278598 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdnrh\" (UniqueName: \"kubernetes.io/projected/2217c78c-2018-4f7d-99eb-158ca2077830-kube-api-access-zdnrh\") pod \"collect-profiles-29331090-lfhdc\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:00 crc kubenswrapper[4813]: I1007 19:30:00.446091 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:01 crc kubenswrapper[4813]: I1007 19:30:01.903858 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc"] Oct 07 19:30:01 crc kubenswrapper[4813]: W1007 19:30:01.907519 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2217c78c_2018_4f7d_99eb_158ca2077830.slice/crio-9345ee65e85519b415c06134975ad2f4f1a6b53d9f898762095b5c5b39b3b1b9 WatchSource:0}: Error finding container 9345ee65e85519b415c06134975ad2f4f1a6b53d9f898762095b5c5b39b3b1b9: Status 404 returned error can't find the container with id 9345ee65e85519b415c06134975ad2f4f1a6b53d9f898762095b5c5b39b3b1b9 Oct 07 19:30:02 crc kubenswrapper[4813]: I1007 19:30:02.668538 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" event={"ID":"d5c7c957-5714-4478-874f-1fe2cc7809af","Type":"ContainerStarted","Data":"ba7c680b008c952e274a376b70b8ec7be60d962f8d810bc437d7268402124de0"} Oct 07 19:30:02 crc kubenswrapper[4813]: I1007 19:30:02.668679 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:30:02 crc kubenswrapper[4813]: I1007 19:30:02.669772 4813 generic.go:334] "Generic (PLEG): container finished" podID="2217c78c-2018-4f7d-99eb-158ca2077830" containerID="01460ba679d0c86e7a748ebe4c95fe1dc3064d488072901947b05d4e54dd1027" exitCode=0 Oct 07 19:30:02 crc kubenswrapper[4813]: I1007 19:30:02.669861 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" event={"ID":"2217c78c-2018-4f7d-99eb-158ca2077830","Type":"ContainerDied","Data":"01460ba679d0c86e7a748ebe4c95fe1dc3064d488072901947b05d4e54dd1027"} Oct 07 19:30:02 crc kubenswrapper[4813]: I1007 19:30:02.669945 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" event={"ID":"2217c78c-2018-4f7d-99eb-158ca2077830","Type":"ContainerStarted","Data":"9345ee65e85519b415c06134975ad2f4f1a6b53d9f898762095b5c5b39b3b1b9"} Oct 07 19:30:02 crc kubenswrapper[4813]: I1007 19:30:02.671812 4813 generic.go:334] "Generic (PLEG): container finished" podID="969bd45f-c575-4c44-a7f8-b8fc0fd89a05" containerID="dd9cd90a1434501468780f05655ab0c26b750d5f247c5ff8c9fb799ca763cd67" exitCode=0 Oct 07 19:30:02 crc kubenswrapper[4813]: I1007 19:30:02.671851 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerDied","Data":"dd9cd90a1434501468780f05655ab0c26b750d5f247c5ff8c9fb799ca763cd67"} Oct 07 19:30:02 crc kubenswrapper[4813]: I1007 19:30:02.694024 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" podStartSLOduration=2.028357738 podStartE2EDuration="9.694003275s" podCreationTimestamp="2025-10-07 19:29:53 +0000 UTC" firstStartedPulling="2025-10-07 19:29:53.995432751 +0000 UTC m=+720.073688362" lastFinishedPulling="2025-10-07 19:30:01.661078288 +0000 UTC m=+727.739333899" observedRunningTime="2025-10-07 19:30:02.693590743 +0000 UTC m=+728.771846374" watchObservedRunningTime="2025-10-07 19:30:02.694003275 +0000 UTC m=+728.772258896" Oct 07 19:30:03 crc kubenswrapper[4813]: I1007 19:30:03.685355 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerDied","Data":"502d1fb601936f922d69bcffc8a9d4d848b36fc083ab81fde6182de0984c8526"} Oct 07 19:30:03 crc kubenswrapper[4813]: I1007 19:30:03.685305 4813 generic.go:334] "Generic (PLEG): container finished" podID="969bd45f-c575-4c44-a7f8-b8fc0fd89a05" containerID="502d1fb601936f922d69bcffc8a9d4d848b36fc083ab81fde6182de0984c8526" exitCode=0 Oct 07 19:30:03 crc kubenswrapper[4813]: I1007 19:30:03.994414 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.098274 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2217c78c-2018-4f7d-99eb-158ca2077830-config-volume\") pod \"2217c78c-2018-4f7d-99eb-158ca2077830\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.098370 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2217c78c-2018-4f7d-99eb-158ca2077830-secret-volume\") pod \"2217c78c-2018-4f7d-99eb-158ca2077830\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.098411 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdnrh\" (UniqueName: \"kubernetes.io/projected/2217c78c-2018-4f7d-99eb-158ca2077830-kube-api-access-zdnrh\") pod \"2217c78c-2018-4f7d-99eb-158ca2077830\" (UID: \"2217c78c-2018-4f7d-99eb-158ca2077830\") " Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.098874 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2217c78c-2018-4f7d-99eb-158ca2077830-config-volume" (OuterVolumeSpecName: "config-volume") pod "2217c78c-2018-4f7d-99eb-158ca2077830" (UID: "2217c78c-2018-4f7d-99eb-158ca2077830"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.103991 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2217c78c-2018-4f7d-99eb-158ca2077830-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2217c78c-2018-4f7d-99eb-158ca2077830" (UID: "2217c78c-2018-4f7d-99eb-158ca2077830"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.105195 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2217c78c-2018-4f7d-99eb-158ca2077830-kube-api-access-zdnrh" (OuterVolumeSpecName: "kube-api-access-zdnrh") pod "2217c78c-2018-4f7d-99eb-158ca2077830" (UID: "2217c78c-2018-4f7d-99eb-158ca2077830"). InnerVolumeSpecName "kube-api-access-zdnrh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.199906 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2217c78c-2018-4f7d-99eb-158ca2077830-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.199958 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdnrh\" (UniqueName: \"kubernetes.io/projected/2217c78c-2018-4f7d-99eb-158ca2077830-kube-api-access-zdnrh\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.199970 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2217c78c-2018-4f7d-99eb-158ca2077830-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.273695 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-w6bx6" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.693672 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.694439 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc" event={"ID":"2217c78c-2018-4f7d-99eb-158ca2077830","Type":"ContainerDied","Data":"9345ee65e85519b415c06134975ad2f4f1a6b53d9f898762095b5c5b39b3b1b9"} Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.694497 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9345ee65e85519b415c06134975ad2f4f1a6b53d9f898762095b5c5b39b3b1b9" Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.697575 4813 generic.go:334] "Generic (PLEG): container finished" podID="969bd45f-c575-4c44-a7f8-b8fc0fd89a05" containerID="242320530caec1ad15a7f10861db87e70003117dc6cd103420dad376ddb12dae" exitCode=0 Oct 07 19:30:04 crc kubenswrapper[4813]: I1007 19:30:04.697616 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerDied","Data":"242320530caec1ad15a7f10861db87e70003117dc6cd103420dad376ddb12dae"} Oct 07 19:30:05 crc kubenswrapper[4813]: I1007 19:30:05.176189 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-5qpw7" Oct 07 19:30:05 crc kubenswrapper[4813]: I1007 19:30:05.706075 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerStarted","Data":"03f1ed38d25306a7c494dd3ccf94c4a305cbc1c0d126df13f930a963d17de050"} Oct 07 19:30:05 crc kubenswrapper[4813]: I1007 19:30:05.706393 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerStarted","Data":"ef361a2b1c6f62bad6807afead0fb68e5d582d4fa8e7b3736c88552e2dfdb973"} Oct 07 19:30:05 crc kubenswrapper[4813]: I1007 19:30:05.706403 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerStarted","Data":"1cceb143076556570745a09540954be3ec1177ad0c41b6ec14587754c8596ab3"} Oct 07 19:30:06 crc kubenswrapper[4813]: I1007 19:30:06.721266 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerStarted","Data":"dd213874ab24f248cc23666b7ca628965194a51cc89277f88deead455120cd89"} Oct 07 19:30:06 crc kubenswrapper[4813]: I1007 19:30:06.721306 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerStarted","Data":"a04129b7ee2dd750b950bae1eaf518cac11c041dcadffca3a830aa308472c178"} Oct 07 19:30:06 crc kubenswrapper[4813]: I1007 19:30:06.721316 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-64plq" event={"ID":"969bd45f-c575-4c44-a7f8-b8fc0fd89a05","Type":"ContainerStarted","Data":"06f82da6d376f1fa402bec7f1c3412ea1c6a1ce1dae19e0db607f0b2251109df"} Oct 07 19:30:06 crc kubenswrapper[4813]: I1007 19:30:06.721784 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-64plq" Oct 07 19:30:06 crc kubenswrapper[4813]: I1007 19:30:06.761800 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-64plq" podStartSLOduration=6.447387033 podStartE2EDuration="13.761782209s" podCreationTimestamp="2025-10-07 19:29:53 +0000 UTC" firstStartedPulling="2025-10-07 19:29:54.282238136 +0000 UTC m=+720.360493747" lastFinishedPulling="2025-10-07 19:30:01.596633312 +0000 UTC m=+727.674888923" observedRunningTime="2025-10-07 19:30:06.757778961 +0000 UTC m=+732.836034592" watchObservedRunningTime="2025-10-07 19:30:06.761782209 +0000 UTC m=+732.840037820" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.032777 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2r2ct"] Oct 07 19:30:08 crc kubenswrapper[4813]: E1007 19:30:08.034277 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2217c78c-2018-4f7d-99eb-158ca2077830" containerName="collect-profiles" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.034451 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2217c78c-2018-4f7d-99eb-158ca2077830" containerName="collect-profiles" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.034802 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2217c78c-2018-4f7d-99eb-158ca2077830" containerName="collect-profiles" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.035587 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2r2ct" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.040629 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2r2ct"] Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.042377 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-srdff" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.042621 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.042769 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.151148 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk72z\" (UniqueName: \"kubernetes.io/projected/f9a4d101-90e3-416d-a06d-984dc037c600-kube-api-access-wk72z\") pod \"openstack-operator-index-2r2ct\" (UID: \"f9a4d101-90e3-416d-a06d-984dc037c600\") " pod="openstack-operators/openstack-operator-index-2r2ct" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.252739 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk72z\" (UniqueName: \"kubernetes.io/projected/f9a4d101-90e3-416d-a06d-984dc037c600-kube-api-access-wk72z\") pod \"openstack-operator-index-2r2ct\" (UID: \"f9a4d101-90e3-416d-a06d-984dc037c600\") " pod="openstack-operators/openstack-operator-index-2r2ct" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.269184 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk72z\" (UniqueName: \"kubernetes.io/projected/f9a4d101-90e3-416d-a06d-984dc037c600-kube-api-access-wk72z\") pod \"openstack-operator-index-2r2ct\" (UID: \"f9a4d101-90e3-416d-a06d-984dc037c600\") " pod="openstack-operators/openstack-operator-index-2r2ct" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.359048 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2r2ct" Oct 07 19:30:08 crc kubenswrapper[4813]: I1007 19:30:08.775744 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2r2ct"] Oct 07 19:30:09 crc kubenswrapper[4813]: I1007 19:30:09.155717 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-64plq" Oct 07 19:30:09 crc kubenswrapper[4813]: I1007 19:30:09.195067 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-64plq" Oct 07 19:30:09 crc kubenswrapper[4813]: I1007 19:30:09.739564 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2r2ct" event={"ID":"f9a4d101-90e3-416d-a06d-984dc037c600","Type":"ContainerStarted","Data":"607e37bbcc51e06d6c61daa00ecfbe9fc9be2ee79d2109fe6f6e9ad204adb545"} Oct 07 19:30:11 crc kubenswrapper[4813]: I1007 19:30:11.398514 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2r2ct"] Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.001023 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-r7css"] Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.002315 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.016441 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-r7css"] Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.102816 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsp8w\" (UniqueName: \"kubernetes.io/projected/ec9bfed9-1014-4ae2-ad89-b1815b613369-kube-api-access-tsp8w\") pod \"openstack-operator-index-r7css\" (UID: \"ec9bfed9-1014-4ae2-ad89-b1815b613369\") " pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.204659 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsp8w\" (UniqueName: \"kubernetes.io/projected/ec9bfed9-1014-4ae2-ad89-b1815b613369-kube-api-access-tsp8w\") pod \"openstack-operator-index-r7css\" (UID: \"ec9bfed9-1014-4ae2-ad89-b1815b613369\") " pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.226249 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsp8w\" (UniqueName: \"kubernetes.io/projected/ec9bfed9-1014-4ae2-ad89-b1815b613369-kube-api-access-tsp8w\") pod \"openstack-operator-index-r7css\" (UID: \"ec9bfed9-1014-4ae2-ad89-b1815b613369\") " pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.316680 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.541062 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-r7css"] Oct 07 19:30:12 crc kubenswrapper[4813]: W1007 19:30:12.549131 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec9bfed9_1014_4ae2_ad89_b1815b613369.slice/crio-6cad9b209e8f9343094d39fd3cccef6e54fcebf0cf341d188ee495553ef51ab7 WatchSource:0}: Error finding container 6cad9b209e8f9343094d39fd3cccef6e54fcebf0cf341d188ee495553ef51ab7: Status 404 returned error can't find the container with id 6cad9b209e8f9343094d39fd3cccef6e54fcebf0cf341d188ee495553ef51ab7 Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.758442 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-r7css" event={"ID":"ec9bfed9-1014-4ae2-ad89-b1815b613369","Type":"ContainerStarted","Data":"d7f7e85a19b49d5d244b337165076b507f18b6efd30af31f35484108a8c42dbf"} Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.758492 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-r7css" event={"ID":"ec9bfed9-1014-4ae2-ad89-b1815b613369","Type":"ContainerStarted","Data":"6cad9b209e8f9343094d39fd3cccef6e54fcebf0cf341d188ee495553ef51ab7"} Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.759927 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2r2ct" event={"ID":"f9a4d101-90e3-416d-a06d-984dc037c600","Type":"ContainerStarted","Data":"6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19"} Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.760188 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-2r2ct" podUID="f9a4d101-90e3-416d-a06d-984dc037c600" containerName="registry-server" containerID="cri-o://6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19" gracePeriod=2 Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.776042 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-r7css" podStartSLOduration=1.7276997139999999 podStartE2EDuration="1.776022264s" podCreationTimestamp="2025-10-07 19:30:11 +0000 UTC" firstStartedPulling="2025-10-07 19:30:12.553757018 +0000 UTC m=+738.632012629" lastFinishedPulling="2025-10-07 19:30:12.602079568 +0000 UTC m=+738.680335179" observedRunningTime="2025-10-07 19:30:12.770976934 +0000 UTC m=+738.849232555" watchObservedRunningTime="2025-10-07 19:30:12.776022264 +0000 UTC m=+738.854277885" Oct 07 19:30:12 crc kubenswrapper[4813]: I1007 19:30:12.790006 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2r2ct" podStartSLOduration=1.672759412 podStartE2EDuration="4.789987037s" podCreationTimestamp="2025-10-07 19:30:08 +0000 UTC" firstStartedPulling="2025-10-07 19:30:08.783248968 +0000 UTC m=+734.861504579" lastFinishedPulling="2025-10-07 19:30:11.900476593 +0000 UTC m=+737.978732204" observedRunningTime="2025-10-07 19:30:12.788995127 +0000 UTC m=+738.867250738" watchObservedRunningTime="2025-10-07 19:30:12.789987037 +0000 UTC m=+738.868242658" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.178985 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2r2ct" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.318078 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk72z\" (UniqueName: \"kubernetes.io/projected/f9a4d101-90e3-416d-a06d-984dc037c600-kube-api-access-wk72z\") pod \"f9a4d101-90e3-416d-a06d-984dc037c600\" (UID: \"f9a4d101-90e3-416d-a06d-984dc037c600\") " Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.336124 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a4d101-90e3-416d-a06d-984dc037c600-kube-api-access-wk72z" (OuterVolumeSpecName: "kube-api-access-wk72z") pod "f9a4d101-90e3-416d-a06d-984dc037c600" (UID: "f9a4d101-90e3-416d-a06d-984dc037c600"). InnerVolumeSpecName "kube-api-access-wk72z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.419541 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk72z\" (UniqueName: \"kubernetes.io/projected/f9a4d101-90e3-416d-a06d-984dc037c600-kube-api-access-wk72z\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.564196 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-m79zg" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.766447 4813 generic.go:334] "Generic (PLEG): container finished" podID="f9a4d101-90e3-416d-a06d-984dc037c600" containerID="6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19" exitCode=0 Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.766531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2r2ct" event={"ID":"f9a4d101-90e3-416d-a06d-984dc037c600","Type":"ContainerDied","Data":"6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19"} Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.766663 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2r2ct" event={"ID":"f9a4d101-90e3-416d-a06d-984dc037c600","Type":"ContainerDied","Data":"607e37bbcc51e06d6c61daa00ecfbe9fc9be2ee79d2109fe6f6e9ad204adb545"} Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.766694 4813 scope.go:117] "RemoveContainer" containerID="6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.767122 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2r2ct" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.780719 4813 scope.go:117] "RemoveContainer" containerID="6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19" Oct 07 19:30:13 crc kubenswrapper[4813]: E1007 19:30:13.781135 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19\": container with ID starting with 6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19 not found: ID does not exist" containerID="6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.781212 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19"} err="failed to get container status \"6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19\": rpc error: code = NotFound desc = could not find container \"6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19\": container with ID starting with 6b0dfb66a4fd88aa58961e4336ff7f28609d0394c47787d1b712daf0dcadbe19 not found: ID does not exist" Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.799180 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2r2ct"] Oct 07 19:30:13 crc kubenswrapper[4813]: I1007 19:30:13.802105 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-2r2ct"] Oct 07 19:30:14 crc kubenswrapper[4813]: I1007 19:30:14.613238 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a4d101-90e3-416d-a06d-984dc037c600" path="/var/lib/kubelet/pods/f9a4d101-90e3-416d-a06d-984dc037c600/volumes" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.155744 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dwjg2"] Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.156309 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" podUID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" containerName="controller-manager" containerID="cri-o://f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5" gracePeriod=30 Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.260671 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"] Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.261172 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" podUID="958766cf-ba8d-4342-a0c2-d8562d930f2e" containerName="route-controller-manager" containerID="cri-o://34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f" gracePeriod=30 Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.759587 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.765004 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.799029 4813 generic.go:334] "Generic (PLEG): container finished" podID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" containerID="f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5" exitCode=0 Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.799070 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.799132 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" event={"ID":"8ac797b3-b22d-4c0a-9d08-733d851ad9f2","Type":"ContainerDied","Data":"f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5"} Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.799202 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dwjg2" event={"ID":"8ac797b3-b22d-4c0a-9d08-733d851ad9f2","Type":"ContainerDied","Data":"45b1e88254ef65963ad74c2b8995609f3d59af429f588b5c24d774909354118f"} Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.799221 4813 scope.go:117] "RemoveContainer" containerID="f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.800381 4813 generic.go:334] "Generic (PLEG): container finished" podID="958766cf-ba8d-4342-a0c2-d8562d930f2e" containerID="34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f" exitCode=0 Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.800449 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.800563 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" event={"ID":"958766cf-ba8d-4342-a0c2-d8562d930f2e","Type":"ContainerDied","Data":"34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f"} Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.800584 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb" event={"ID":"958766cf-ba8d-4342-a0c2-d8562d930f2e","Type":"ContainerDied","Data":"feac8abc88945abef2e3007be2119f58ee6d88071cc0cd31f83976bcdca65e43"} Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.818620 4813 scope.go:117] "RemoveContainer" containerID="f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5" Oct 07 19:30:18 crc kubenswrapper[4813]: E1007 19:30:18.819050 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5\": container with ID starting with f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5 not found: ID does not exist" containerID="f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.819086 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5"} err="failed to get container status \"f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5\": rpc error: code = NotFound desc = could not find container \"f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5\": container with ID starting with f5ec1ab35bd7f37a06d31014bba17ff06f47695ab4b14acae6c8f212b028d7f5 not found: ID does not exist" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.819107 4813 scope.go:117] "RemoveContainer" containerID="34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.843796 4813 scope.go:117] "RemoveContainer" containerID="34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f" Oct 07 19:30:18 crc kubenswrapper[4813]: E1007 19:30:18.844129 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f\": container with ID starting with 34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f not found: ID does not exist" containerID="34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.844160 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f"} err="failed to get container status \"34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f\": rpc error: code = NotFound desc = could not find container \"34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f\": container with ID starting with 34a9a223568de5ac0e6c15fa91cfcb894eb90a69f65a02d7473f66657d86b24f not found: ID does not exist" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.886120 4813 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958766cf-ba8d-4342-a0c2-d8562d930f2e-serving-cert\") pod \"958766cf-ba8d-4342-a0c2-d8562d930f2e\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.886172 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-serving-cert\") pod \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.886202 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnmp6\" (UniqueName: \"kubernetes.io/projected/958766cf-ba8d-4342-a0c2-d8562d930f2e-kube-api-access-nnmp6\") pod \"958766cf-ba8d-4342-a0c2-d8562d930f2e\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.886274 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg4nj\" (UniqueName: \"kubernetes.io/projected/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-kube-api-access-rg4nj\") pod \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.886902 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles\") pod \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.886958 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca\") pod \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.886995 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-client-ca\") pod \"958766cf-ba8d-4342-a0c2-d8562d930f2e\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.887015 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-config\") pod \"958766cf-ba8d-4342-a0c2-d8562d930f2e\" (UID: \"958766cf-ba8d-4342-a0c2-d8562d930f2e\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.887070 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-config\") pod \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\" (UID: \"8ac797b3-b22d-4c0a-9d08-733d851ad9f2\") " Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.887393 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8ac797b3-b22d-4c0a-9d08-733d851ad9f2" (UID: "8ac797b3-b22d-4c0a-9d08-733d851ad9f2"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.887443 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca" (OuterVolumeSpecName: "client-ca") pod "8ac797b3-b22d-4c0a-9d08-733d851ad9f2" (UID: "8ac797b3-b22d-4c0a-9d08-733d851ad9f2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.887804 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-client-ca" (OuterVolumeSpecName: "client-ca") pod "958766cf-ba8d-4342-a0c2-d8562d930f2e" (UID: "958766cf-ba8d-4342-a0c2-d8562d930f2e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.887875 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-config" (OuterVolumeSpecName: "config") pod "958766cf-ba8d-4342-a0c2-d8562d930f2e" (UID: "958766cf-ba8d-4342-a0c2-d8562d930f2e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.887911 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-config" (OuterVolumeSpecName: "config") pod "8ac797b3-b22d-4c0a-9d08-733d851ad9f2" (UID: "8ac797b3-b22d-4c0a-9d08-733d851ad9f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.891373 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/958766cf-ba8d-4342-a0c2-d8562d930f2e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "958766cf-ba8d-4342-a0c2-d8562d930f2e" (UID: "958766cf-ba8d-4342-a0c2-d8562d930f2e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.891622 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/958766cf-ba8d-4342-a0c2-d8562d930f2e-kube-api-access-nnmp6" (OuterVolumeSpecName: "kube-api-access-nnmp6") pod "958766cf-ba8d-4342-a0c2-d8562d930f2e" (UID: "958766cf-ba8d-4342-a0c2-d8562d930f2e"). InnerVolumeSpecName "kube-api-access-nnmp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.892218 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8ac797b3-b22d-4c0a-9d08-733d851ad9f2" (UID: "8ac797b3-b22d-4c0a-9d08-733d851ad9f2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.901603 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-kube-api-access-rg4nj" (OuterVolumeSpecName: "kube-api-access-rg4nj") pod "8ac797b3-b22d-4c0a-9d08-733d851ad9f2" (UID: "8ac797b3-b22d-4c0a-9d08-733d851ad9f2"). InnerVolumeSpecName "kube-api-access-rg4nj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988722 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/958766cf-ba8d-4342-a0c2-d8562d930f2e-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988761 4813 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988776 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnmp6\" (UniqueName: \"kubernetes.io/projected/958766cf-ba8d-4342-a0c2-d8562d930f2e-kube-api-access-nnmp6\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988790 4813 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988801 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg4nj\" (UniqueName: \"kubernetes.io/projected/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-kube-api-access-rg4nj\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988812 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988822 4813 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-client-ca\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988833 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958766cf-ba8d-4342-a0c2-d8562d930f2e-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:18 crc kubenswrapper[4813]: I1007 19:30:18.988844 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ac797b3-b22d-4c0a-9d08-733d851ad9f2-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.130817 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"] Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.138524 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-lq4gb"] Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.146115 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dwjg2"] Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.150747 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dwjg2"] Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.321093 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd"] Oct 07 19:30:19 crc kubenswrapper[4813]: E1007 19:30:19.321693 4813 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" containerName="controller-manager" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.321709 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" containerName="controller-manager" Oct 07 19:30:19 crc kubenswrapper[4813]: E1007 19:30:19.321735 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="958766cf-ba8d-4342-a0c2-d8562d930f2e" containerName="route-controller-manager" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.321743 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="958766cf-ba8d-4342-a0c2-d8562d930f2e" containerName="route-controller-manager" Oct 07 19:30:19 crc kubenswrapper[4813]: E1007 19:30:19.321760 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a4d101-90e3-416d-a06d-984dc037c600" containerName="registry-server" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.321768 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a4d101-90e3-416d-a06d-984dc037c600" containerName="registry-server" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.321887 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a4d101-90e3-416d-a06d-984dc037c600" containerName="registry-server" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.321907 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="958766cf-ba8d-4342-a0c2-d8562d930f2e" containerName="route-controller-manager" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.321919 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" containerName="controller-manager" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.322366 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.324914 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.325424 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.325546 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.326254 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.327031 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.333811 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.341425 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd"] Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.393359 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e22684b-384a-45a9-b822-094484aedb2e-client-ca\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.393675 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e22684b-384a-45a9-b822-094484aedb2e-config\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.393817 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e22684b-384a-45a9-b822-094484aedb2e-serving-cert\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.393906 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kz28\" (UniqueName: \"kubernetes.io/projected/1e22684b-384a-45a9-b822-094484aedb2e-kube-api-access-2kz28\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.494576 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e22684b-384a-45a9-b822-094484aedb2e-serving-cert\") pod 
\"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.494613 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kz28\" (UniqueName: \"kubernetes.io/projected/1e22684b-384a-45a9-b822-094484aedb2e-kube-api-access-2kz28\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.494661 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e22684b-384a-45a9-b822-094484aedb2e-client-ca\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.494679 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e22684b-384a-45a9-b822-094484aedb2e-config\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.495786 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e22684b-384a-45a9-b822-094484aedb2e-config\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.495962 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e22684b-384a-45a9-b822-094484aedb2e-client-ca\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.499403 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e22684b-384a-45a9-b822-094484aedb2e-serving-cert\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.512813 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kz28\" (UniqueName: \"kubernetes.io/projected/1e22684b-384a-45a9-b822-094484aedb2e-kube-api-access-2kz28\") pod \"route-controller-manager-65b8698644-t2rdd\" (UID: \"1e22684b-384a-45a9-b822-094484aedb2e\") " pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.636603 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.735856 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-859cbccb7-tv7vc"] Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.743608 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.748402 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.748620 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.748738 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.748835 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.748933 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.750736 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.753427 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-859cbccb7-tv7vc"] Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.754724 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.898571 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-config\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.898614 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hwv7\" (UniqueName: \"kubernetes.io/projected/27b736bd-9b68-4797-babb-9e798147588a-kube-api-access-5hwv7\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.898638 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-proxy-ca-bundles\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.898659 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-client-ca\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:19 crc kubenswrapper[4813]: I1007 19:30:19.898737 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27b736bd-9b68-4797-babb-9e798147588a-serving-cert\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.000261 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hwv7\" (UniqueName: \"kubernetes.io/projected/27b736bd-9b68-4797-babb-9e798147588a-kube-api-access-5hwv7\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.000301 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-proxy-ca-bundles\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.000378 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-client-ca\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.000407 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27b736bd-9b68-4797-babb-9e798147588a-serving-cert\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.000475 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-config\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.002412 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-config\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.002761 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-client-ca\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" 
Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.003189 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27b736bd-9b68-4797-babb-9e798147588a-proxy-ca-bundles\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.008806 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27b736bd-9b68-4797-babb-9e798147588a-serving-cert\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.024554 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hwv7\" (UniqueName: \"kubernetes.io/projected/27b736bd-9b68-4797-babb-9e798147588a-kube-api-access-5hwv7\") pod \"controller-manager-859cbccb7-tv7vc\" (UID: \"27b736bd-9b68-4797-babb-9e798147588a\") " pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.084493 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd"] Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.085134 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: W1007 19:30:20.092687 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e22684b_384a_45a9_b822_094484aedb2e.slice/crio-13f213f836383328807e7b92191b580d3c98d621be9ec11162739d908181a816 WatchSource:0}: Error finding container 13f213f836383328807e7b92191b580d3c98d621be9ec11162739d908181a816: Status 404 returned error can't find the container with id 13f213f836383328807e7b92191b580d3c98d621be9ec11162739d908181a816 Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.500931 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-859cbccb7-tv7vc"] Oct 07 19:30:20 crc kubenswrapper[4813]: W1007 19:30:20.514802 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27b736bd_9b68_4797_babb_9e798147588a.slice/crio-2e0d35337390eb3785e805e108b6bf4a54ce8d444a43f7bbdcb0205952a0d86f WatchSource:0}: Error finding container 2e0d35337390eb3785e805e108b6bf4a54ce8d444a43f7bbdcb0205952a0d86f: Status 404 returned error can't find the container with id 2e0d35337390eb3785e805e108b6bf4a54ce8d444a43f7bbdcb0205952a0d86f Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.611686 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ac797b3-b22d-4c0a-9d08-733d851ad9f2" path="/var/lib/kubelet/pods/8ac797b3-b22d-4c0a-9d08-733d851ad9f2/volumes" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.612308 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="958766cf-ba8d-4342-a0c2-d8562d930f2e" path="/var/lib/kubelet/pods/958766cf-ba8d-4342-a0c2-d8562d930f2e/volumes" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.814604 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" event={"ID":"27b736bd-9b68-4797-babb-9e798147588a","Type":"ContainerStarted","Data":"ab18d11a4f3418609cc94e031cbd1850ece165058cccee5a25c0a7d7802ed972"} Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.814643 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" event={"ID":"27b736bd-9b68-4797-babb-9e798147588a","Type":"ContainerStarted","Data":"2e0d35337390eb3785e805e108b6bf4a54ce8d444a43f7bbdcb0205952a0d86f"} Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.815814 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.816784 4813 patch_prober.go:28] interesting pod/controller-manager-859cbccb7-tv7vc container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.66:8443/healthz\": dial tcp 10.217.0.66:8443: connect: connection refused" start-of-body= Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.816817 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" podUID="27b736bd-9b68-4797-babb-9e798147588a" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.66:8443/healthz\": dial tcp 10.217.0.66:8443: connect: connection refused" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.818581 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" event={"ID":"1e22684b-384a-45a9-b822-094484aedb2e","Type":"ContainerStarted","Data":"73c5d2e62ad131bce40cd9a809da994c4e9ad6b57d4ba8e4637aaa24d2abf7cb"} Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.818610 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" event={"ID":"1e22684b-384a-45a9-b822-094484aedb2e","Type":"ContainerStarted","Data":"13f213f836383328807e7b92191b580d3c98d621be9ec11162739d908181a816"} Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.819302 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.822729 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.840413 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" podStartSLOduration=2.840398586 podStartE2EDuration="2.840398586s" podCreationTimestamp="2025-10-07 19:30:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:30:20.839762047 +0000 UTC m=+746.918017658" watchObservedRunningTime="2025-10-07 19:30:20.840398586 +0000 UTC m=+746.918654197" Oct 07 19:30:20 crc kubenswrapper[4813]: I1007 19:30:20.863309 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-65b8698644-t2rdd" podStartSLOduration=1.863291214 podStartE2EDuration="1.863291214s" 
podCreationTimestamp="2025-10-07 19:30:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:30:20.861108239 +0000 UTC m=+746.939363850" watchObservedRunningTime="2025-10-07 19:30:20.863291214 +0000 UTC m=+746.941546825" Oct 07 19:30:21 crc kubenswrapper[4813]: I1007 19:30:21.840444 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-859cbccb7-tv7vc" Oct 07 19:30:22 crc kubenswrapper[4813]: I1007 19:30:22.078580 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:30:22 crc kubenswrapper[4813]: I1007 19:30:22.078669 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:30:22 crc kubenswrapper[4813]: I1007 19:30:22.316803 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:22 crc kubenswrapper[4813]: I1007 19:30:22.316880 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:22 crc kubenswrapper[4813]: I1007 19:30:22.342984 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:22 crc kubenswrapper[4813]: I1007 19:30:22.869280 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-r7css" Oct 07 19:30:24 crc kubenswrapper[4813]: I1007 19:30:24.158570 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-64plq" Oct 07 19:30:24 crc kubenswrapper[4813]: I1007 19:30:24.955919 4813 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.348226 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd"] Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.351722 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.355889 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-mhwtn" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.369600 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd"] Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.456483 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-bundle\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.456670 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m7n9\" (UniqueName: \"kubernetes.io/projected/0413a8f5-bc87-4e1c-b38c-778c0fff449c-kube-api-access-9m7n9\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.456713 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-util\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.558362 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m7n9\" (UniqueName: \"kubernetes.io/projected/0413a8f5-bc87-4e1c-b38c-778c0fff449c-kube-api-access-9m7n9\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.558444 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-util\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.558547 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-bundle\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.559419 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-bundle\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.560301 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-util\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.594569 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m7n9\" (UniqueName: \"kubernetes.io/projected/0413a8f5-bc87-4e1c-b38c-778c0fff449c-kube-api-access-9m7n9\") pod \"a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:30 crc kubenswrapper[4813]: I1007 19:30:30.680153 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:31 crc kubenswrapper[4813]: I1007 19:30:31.105057 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd"] Oct 07 19:30:31 crc kubenswrapper[4813]: I1007 19:30:31.910882 4813 generic.go:334] "Generic (PLEG): container finished" podID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerID="7cc5d7795dcc76b41a0446c67a271322d9028e84c3b78f32521f8d3616a4c642" exitCode=0 Oct 07 19:30:31 crc kubenswrapper[4813]: I1007 19:30:31.910965 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" event={"ID":"0413a8f5-bc87-4e1c-b38c-778c0fff449c","Type":"ContainerDied","Data":"7cc5d7795dcc76b41a0446c67a271322d9028e84c3b78f32521f8d3616a4c642"} Oct 07 19:30:31 crc kubenswrapper[4813]: I1007 19:30:31.911348 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" event={"ID":"0413a8f5-bc87-4e1c-b38c-778c0fff449c","Type":"ContainerStarted","Data":"2453fca3578c581e1751b9e51331cd88ec8b5ce4deae672f35cac8535ed0033f"} Oct 07 19:30:32 crc kubenswrapper[4813]: I1007 19:30:32.919952 4813 generic.go:334] "Generic (PLEG): container finished" podID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerID="c58554c4b3a5eec0065b4f02f079d0482bfc827d98acee1a9fb6f9ff241fa8a2" exitCode=0 Oct 07 19:30:32 crc kubenswrapper[4813]: I1007 19:30:32.920177 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" event={"ID":"0413a8f5-bc87-4e1c-b38c-778c0fff449c","Type":"ContainerDied","Data":"c58554c4b3a5eec0065b4f02f079d0482bfc827d98acee1a9fb6f9ff241fa8a2"} Oct 07 19:30:33 crc kubenswrapper[4813]: I1007 19:30:33.932777 4813 generic.go:334] "Generic (PLEG): container finished" podID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerID="b65376cd4ea8ea596eee6351538b93957a75657a54c6dda759d22c25aa1f86a5" exitCode=0 Oct 07 19:30:33 crc kubenswrapper[4813]: I1007 19:30:33.932847 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" event={"ID":"0413a8f5-bc87-4e1c-b38c-778c0fff449c","Type":"ContainerDied","Data":"b65376cd4ea8ea596eee6351538b93957a75657a54c6dda759d22c25aa1f86a5"} Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.344701 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.430783 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9m7n9\" (UniqueName: \"kubernetes.io/projected/0413a8f5-bc87-4e1c-b38c-778c0fff449c-kube-api-access-9m7n9\") pod \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.430892 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-bundle\") pod \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.430927 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-util\") pod \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\" (UID: \"0413a8f5-bc87-4e1c-b38c-778c0fff449c\") " Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.431729 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-bundle" (OuterVolumeSpecName: "bundle") pod "0413a8f5-bc87-4e1c-b38c-778c0fff449c" (UID: "0413a8f5-bc87-4e1c-b38c-778c0fff449c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.436798 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0413a8f5-bc87-4e1c-b38c-778c0fff449c-kube-api-access-9m7n9" (OuterVolumeSpecName: "kube-api-access-9m7n9") pod "0413a8f5-bc87-4e1c-b38c-778c0fff449c" (UID: "0413a8f5-bc87-4e1c-b38c-778c0fff449c"). InnerVolumeSpecName "kube-api-access-9m7n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.443509 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-util" (OuterVolumeSpecName: "util") pod "0413a8f5-bc87-4e1c-b38c-778c0fff449c" (UID: "0413a8f5-bc87-4e1c-b38c-778c0fff449c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.532560 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9m7n9\" (UniqueName: \"kubernetes.io/projected/0413a8f5-bc87-4e1c-b38c-778c0fff449c-kube-api-access-9m7n9\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.532603 4813 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.532612 4813 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0413a8f5-bc87-4e1c-b38c-778c0fff449c-util\") on node \"crc\" DevicePath \"\"" Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.953225 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" event={"ID":"0413a8f5-bc87-4e1c-b38c-778c0fff449c","Type":"ContainerDied","Data":"2453fca3578c581e1751b9e51331cd88ec8b5ce4deae672f35cac8535ed0033f"} Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.953603 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2453fca3578c581e1751b9e51331cd88ec8b5ce4deae672f35cac8535ed0033f" Oct 07 19:30:35 crc kubenswrapper[4813]: I1007 19:30:35.953267 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.211945 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv"] Oct 07 19:30:43 crc kubenswrapper[4813]: E1007 19:30:43.212918 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerName="pull" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.212940 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerName="pull" Oct 07 19:30:43 crc kubenswrapper[4813]: E1007 19:30:43.212979 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerName="extract" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.212990 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerName="extract" Oct 07 19:30:43 crc kubenswrapper[4813]: E1007 19:30:43.213010 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerName="util" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.213023 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerName="util" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.213184 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0413a8f5-bc87-4e1c-b38c-778c0fff449c" containerName="extract" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.214182 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.219399 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-8qj89" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.253169 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv"] Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.336152 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbfg2\" (UniqueName: \"kubernetes.io/projected/e0480957-44d9-4dcf-915f-ba4db55ad450-kube-api-access-nbfg2\") pod \"openstack-operator-controller-operator-765cf949f-6sxnv\" (UID: \"e0480957-44d9-4dcf-915f-ba4db55ad450\") " pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.437823 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbfg2\" (UniqueName: \"kubernetes.io/projected/e0480957-44d9-4dcf-915f-ba4db55ad450-kube-api-access-nbfg2\") pod \"openstack-operator-controller-operator-765cf949f-6sxnv\" (UID: \"e0480957-44d9-4dcf-915f-ba4db55ad450\") " pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.470767 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbfg2\" (UniqueName: \"kubernetes.io/projected/e0480957-44d9-4dcf-915f-ba4db55ad450-kube-api-access-nbfg2\") pod \"openstack-operator-controller-operator-765cf949f-6sxnv\" (UID: \"e0480957-44d9-4dcf-915f-ba4db55ad450\") " pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.530885 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" Oct 07 19:30:43 crc kubenswrapper[4813]: I1007 19:30:43.964507 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv"] Oct 07 19:30:44 crc kubenswrapper[4813]: I1007 19:30:44.015730 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" event={"ID":"e0480957-44d9-4dcf-915f-ba4db55ad450","Type":"ContainerStarted","Data":"b19eebaab8f72c7c8a81dee08c543ed871a214465f555874dbf7e7024d2585d2"} Oct 07 19:30:49 crc kubenswrapper[4813]: I1007 19:30:49.072869 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" event={"ID":"e0480957-44d9-4dcf-915f-ba4db55ad450","Type":"ContainerStarted","Data":"fa2cdfceee4c007391ec1672f1103392c942ea4b419f7a192676e45acb12f9fe"} Oct 07 19:30:51 crc kubenswrapper[4813]: I1007 19:30:51.087741 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" event={"ID":"e0480957-44d9-4dcf-915f-ba4db55ad450","Type":"ContainerStarted","Data":"efabd5eb39b530c64838939a85903329b1c40b07f117deecb844adafc81cf6c8"} Oct 07 19:30:51 crc kubenswrapper[4813]: I1007 19:30:51.088920 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" Oct 07 19:30:51 crc kubenswrapper[4813]: I1007 19:30:51.126027 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" podStartSLOduration=1.519947819 podStartE2EDuration="8.126005551s" podCreationTimestamp="2025-10-07 19:30:43 +0000 UTC" firstStartedPulling="2025-10-07 19:30:43.981040677 +0000 UTC m=+770.059296288" lastFinishedPulling="2025-10-07 19:30:50.587098409 +0000 UTC m=+776.665354020" observedRunningTime="2025-10-07 19:30:51.123763655 +0000 UTC m=+777.202019286" watchObservedRunningTime="2025-10-07 19:30:51.126005551 +0000 UTC m=+777.204261172" Oct 07 19:30:52 crc kubenswrapper[4813]: I1007 19:30:52.078857 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:30:52 crc kubenswrapper[4813]: I1007 19:30:52.078907 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:30:52 crc kubenswrapper[4813]: I1007 19:30:52.078950 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:30:52 crc kubenswrapper[4813]: I1007 19:30:52.079355 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"65fe0b5a9444ed388154693078866b82b9f87cf7cbddae0e9656f26066276d1a"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Oct 07 19:30:52 crc kubenswrapper[4813]: I1007 19:30:52.079408 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://65fe0b5a9444ed388154693078866b82b9f87cf7cbddae0e9656f26066276d1a" gracePeriod=600 Oct 07 19:30:53 crc kubenswrapper[4813]: I1007 19:30:53.104275 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="65fe0b5a9444ed388154693078866b82b9f87cf7cbddae0e9656f26066276d1a" exitCode=0 Oct 07 19:30:53 crc kubenswrapper[4813]: I1007 19:30:53.104350 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"65fe0b5a9444ed388154693078866b82b9f87cf7cbddae0e9656f26066276d1a"} Oct 07 19:30:53 crc kubenswrapper[4813]: I1007 19:30:53.104781 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"c4e3a874402bcde4b4b4d8190142ef2959a5d27f6fb1ca4f9803d48de7b2c187"} Oct 07 19:30:53 crc kubenswrapper[4813]: I1007 19:30:53.104818 4813 scope.go:117] "RemoveContainer" containerID="b78c84794157bdaf4c6d8429f03a3dc0ddbbcbef98ccb9a89291d17bfc31a4dd" Oct 07 19:30:53 crc kubenswrapper[4813]: I1007 19:30:53.536234 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-765cf949f-6sxnv" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.093593 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.095313 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.102548 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.102827 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-v9pt8" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.103704 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.109478 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-mmvsw" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.123627 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.132090 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.143618 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.145932 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.156703 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-lqntz" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.161161 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.164111 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.172649 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-sd67j" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.201494 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz5h2\" (UniqueName: \"kubernetes.io/projected/145ac332-1c3f-4aec-8438-0c3d36ca2c67-kube-api-access-pz5h2\") pod \"designate-operator-controller-manager-687df44cdb-ndtgw\" (UID: \"145ac332-1c3f-4aec-8438-0c3d36ca2c67\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.201780 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5vqf\" (UniqueName: \"kubernetes.io/projected/8ff43feb-7984-4f63-b5b4-ab460e72ddc8-kube-api-access-j5vqf\") pod \"cinder-operator-controller-manager-59cdc64769-m49nk\" (UID: \"8ff43feb-7984-4f63-b5b4-ab460e72ddc8\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.201798 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2822\" (UniqueName: \"kubernetes.io/projected/e90691e1-eed5-4c60-af67-46cfca160910-kube-api-access-q2822\") pod \"barbican-operator-controller-manager-64f84fcdbb-fgdgs\" (UID: \"e90691e1-eed5-4c60-af67-46cfca160910\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.201823 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-446s4\" (UniqueName: \"kubernetes.io/projected/66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2-kube-api-access-446s4\") pod \"glance-operator-controller-manager-7bb46cd7d-lk6lc\" (UID: \"66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.207901 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.228097 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.242388 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.243270 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.253005 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-hjnrf" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.261284 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.292938 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.315519 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.315769 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-r2p2d" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.319415 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2tfc\" (UniqueName: \"kubernetes.io/projected/0a182939-eba6-4da5-9e36-567b6a2a37c3-kube-api-access-k2tfc\") pod \"horizon-operator-controller-manager-6d74794d9b-zpbgs\" (UID: \"0a182939-eba6-4da5-9e36-567b6a2a37c3\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.319490 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz5h2\" (UniqueName: \"kubernetes.io/projected/145ac332-1c3f-4aec-8438-0c3d36ca2c67-kube-api-access-pz5h2\") pod \"designate-operator-controller-manager-687df44cdb-ndtgw\" (UID: \"145ac332-1c3f-4aec-8438-0c3d36ca2c67\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.319579 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5vqf\" (UniqueName: \"kubernetes.io/projected/8ff43feb-7984-4f63-b5b4-ab460e72ddc8-kube-api-access-j5vqf\") pod \"cinder-operator-controller-manager-59cdc64769-m49nk\" (UID: \"8ff43feb-7984-4f63-b5b4-ab460e72ddc8\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" Oct 07 19:31:25 crc 
kubenswrapper[4813]: I1007 19:31:25.319719 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2822\" (UniqueName: \"kubernetes.io/projected/e90691e1-eed5-4c60-af67-46cfca160910-kube-api-access-q2822\") pod \"barbican-operator-controller-manager-64f84fcdbb-fgdgs\" (UID: \"e90691e1-eed5-4c60-af67-46cfca160910\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.319910 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-446s4\" (UniqueName: \"kubernetes.io/projected/66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2-kube-api-access-446s4\") pod \"glance-operator-controller-manager-7bb46cd7d-lk6lc\" (UID: \"66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.342944 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.355756 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.362849 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2822\" (UniqueName: \"kubernetes.io/projected/e90691e1-eed5-4c60-af67-46cfca160910-kube-api-access-q2822\") pod \"barbican-operator-controller-manager-64f84fcdbb-fgdgs\" (UID: \"e90691e1-eed5-4c60-af67-46cfca160910\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.369273 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5vqf\" (UniqueName: \"kubernetes.io/projected/8ff43feb-7984-4f63-b5b4-ab460e72ddc8-kube-api-access-j5vqf\") pod \"cinder-operator-controller-manager-59cdc64769-m49nk\" (UID: \"8ff43feb-7984-4f63-b5b4-ab460e72ddc8\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.372407 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.374240 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-k9fjx" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.376290 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.386736 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz5h2\" (UniqueName: \"kubernetes.io/projected/145ac332-1c3f-4aec-8438-0c3d36ca2c67-kube-api-access-pz5h2\") pod \"designate-operator-controller-manager-687df44cdb-ndtgw\" (UID: \"145ac332-1c3f-4aec-8438-0c3d36ca2c67\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.390812 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.398614 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-446s4\" (UniqueName: \"kubernetes.io/projected/66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2-kube-api-access-446s4\") pod \"glance-operator-controller-manager-7bb46cd7d-lk6lc\" (UID: \"66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.417069 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.418120 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.419863 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.422309 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2tfc\" (UniqueName: \"kubernetes.io/projected/0a182939-eba6-4da5-9e36-567b6a2a37c3-kube-api-access-k2tfc\") pod \"horizon-operator-controller-manager-6d74794d9b-zpbgs\" (UID: \"0a182939-eba6-4da5-9e36-567b6a2a37c3\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.422397 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72b45aa2-2bd2-4339-8a89-5a2910798969-cert\") pod \"infra-operator-controller-manager-585fc5b659-gngt9\" (UID: \"72b45aa2-2bd2-4339-8a89-5a2910798969\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.422431 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6wsh\" (UniqueName: \"kubernetes.io/projected/ff4408c4-9269-43c0-8016-520816b8cd5d-kube-api-access-l6wsh\") pod \"heat-operator-controller-manager-6d9967f8dd-mtpdz\" (UID: \"ff4408c4-9269-43c0-8016-520816b8cd5d\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.422457 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnwdp\" (UniqueName: \"kubernetes.io/projected/72b45aa2-2bd2-4339-8a89-5a2910798969-kube-api-access-mnwdp\") pod \"infra-operator-controller-manager-585fc5b659-gngt9\" (UID: \"72b45aa2-2bd2-4339-8a89-5a2910798969\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.424164 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-r7j46" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.429243 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.429977 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.461767 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.465058 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.474058 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-mm2fw" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.486715 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.492048 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.493225 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.494535 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2tfc\" (UniqueName: \"kubernetes.io/projected/0a182939-eba6-4da5-9e36-567b6a2a37c3-kube-api-access-k2tfc\") pod \"horizon-operator-controller-manager-6d74794d9b-zpbgs\" (UID: \"0a182939-eba6-4da5-9e36-567b6a2a37c3\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.513918 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.517666 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-4df52" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.519216 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.524412 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6wsh\" (UniqueName: \"kubernetes.io/projected/ff4408c4-9269-43c0-8016-520816b8cd5d-kube-api-access-l6wsh\") pod \"heat-operator-controller-manager-6d9967f8dd-mtpdz\" (UID: \"ff4408c4-9269-43c0-8016-520816b8cd5d\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.524446 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2l5t\" (UniqueName: \"kubernetes.io/projected/0bac4f10-1d47-40aa-b93e-9a0789801e9b-kube-api-access-q2l5t\") pod \"ironic-operator-controller-manager-74cb5cbc49-662qq\" (UID: \"0bac4f10-1d47-40aa-b93e-9a0789801e9b\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.524471 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnwdp\" (UniqueName: \"kubernetes.io/projected/72b45aa2-2bd2-4339-8a89-5a2910798969-kube-api-access-mnwdp\") pod \"infra-operator-controller-manager-585fc5b659-gngt9\" (UID: \"72b45aa2-2bd2-4339-8a89-5a2910798969\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.524525 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn7pn\" (UniqueName: \"kubernetes.io/projected/b487945e-823b-4d95-a1dc-6f7148aa053c-kube-api-access-pn7pn\") pod \"keystone-operator-controller-manager-ddb98f99b-kjl4l\" (UID: \"b487945e-823b-4d95-a1dc-6f7148aa053c\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.524574 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72b45aa2-2bd2-4339-8a89-5a2910798969-cert\") pod \"infra-operator-controller-manager-585fc5b659-gngt9\" (UID: \"72b45aa2-2bd2-4339-8a89-5a2910798969\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:25 crc kubenswrapper[4813]: E1007 19:31:25.524700 4813 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 07 19:31:25 crc kubenswrapper[4813]: E1007 19:31:25.524753 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72b45aa2-2bd2-4339-8a89-5a2910798969-cert podName:72b45aa2-2bd2-4339-8a89-5a2910798969 nodeName:}" failed. No retries permitted until 2025-10-07 19:31:26.024733562 +0000 UTC m=+812.102989173 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/72b45aa2-2bd2-4339-8a89-5a2910798969-cert") pod "infra-operator-controller-manager-585fc5b659-gngt9" (UID: "72b45aa2-2bd2-4339-8a89-5a2910798969") : secret "infra-operator-webhook-server-cert" not found Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.542464 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.543516 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.561428 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.573099 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p7fld"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.574174 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.586261 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.593080 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.594972 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-k7v72" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.600843 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7fld"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.612108 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.613181 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.619578 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnwdp\" (UniqueName: \"kubernetes.io/projected/72b45aa2-2bd2-4339-8a89-5a2910798969-kube-api-access-mnwdp\") pod \"infra-operator-controller-manager-585fc5b659-gngt9\" (UID: \"72b45aa2-2bd2-4339-8a89-5a2910798969\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.630900 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn7pn\" (UniqueName: \"kubernetes.io/projected/b487945e-823b-4d95-a1dc-6f7148aa053c-kube-api-access-pn7pn\") pod \"keystone-operator-controller-manager-ddb98f99b-kjl4l\" (UID: \"b487945e-823b-4d95-a1dc-6f7148aa053c\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.630953 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-catalog-content\") pod \"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.630974 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjbgb\" (UniqueName: \"kubernetes.io/projected/ff2bb528-f133-456a-9e91-5f4ef07a4f2f-kube-api-access-sjbgb\") pod \"mariadb-operator-controller-manager-5777b4f897-6xkm2\" (UID: \"ff2bb528-f133-456a-9e91-5f4ef07a4f2f\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.631040 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2l5t\" (UniqueName: \"kubernetes.io/projected/0bac4f10-1d47-40aa-b93e-9a0789801e9b-kube-api-access-q2l5t\") pod \"ironic-operator-controller-manager-74cb5cbc49-662qq\" (UID: \"0bac4f10-1d47-40aa-b93e-9a0789801e9b\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.631064 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df4fj\" (UniqueName: \"kubernetes.io/projected/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-kube-api-access-df4fj\") pod \"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.631106 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzdfs\" (UniqueName: \"kubernetes.io/projected/eb9b4085-2e2d-4955-bbd3-2c53bcada088-kube-api-access-fzdfs\") pod \"manila-operator-controller-manager-59578bc799-v6ggr\" (UID: \"eb9b4085-2e2d-4955-bbd3-2c53bcada088\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.631149 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-utilities\") pod 
\"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.633749 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-fgkq5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.634148 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.635120 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.648229 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-9grfn" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.649175 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.684805 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2l5t\" (UniqueName: \"kubernetes.io/projected/0bac4f10-1d47-40aa-b93e-9a0789801e9b-kube-api-access-q2l5t\") pod \"ironic-operator-controller-manager-74cb5cbc49-662qq\" (UID: \"0bac4f10-1d47-40aa-b93e-9a0789801e9b\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.685991 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6wsh\" (UniqueName: \"kubernetes.io/projected/ff4408c4-9269-43c0-8016-520816b8cd5d-kube-api-access-l6wsh\") pod \"heat-operator-controller-manager-6d9967f8dd-mtpdz\" (UID: \"ff4408c4-9269-43c0-8016-520816b8cd5d\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.693066 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.704582 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.705543 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.710358 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-696gx" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.727797 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn7pn\" (UniqueName: \"kubernetes.io/projected/b487945e-823b-4d95-a1dc-6f7148aa053c-kube-api-access-pn7pn\") pod \"keystone-operator-controller-manager-ddb98f99b-kjl4l\" (UID: \"b487945e-823b-4d95-a1dc-6f7148aa053c\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.730375 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.735126 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df4fj\" (UniqueName: \"kubernetes.io/projected/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-kube-api-access-df4fj\") pod \"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.735187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbph9\" (UniqueName: \"kubernetes.io/projected/954d30ae-2fcd-4d29-8d44-a1cf40b56f27-kube-api-access-cbph9\") pod \"neutron-operator-controller-manager-797d478b46-xzgn5\" (UID: \"954d30ae-2fcd-4d29-8d44-a1cf40b56f27\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.735210 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzdfs\" (UniqueName: \"kubernetes.io/projected/eb9b4085-2e2d-4955-bbd3-2c53bcada088-kube-api-access-fzdfs\") pod \"manila-operator-controller-manager-59578bc799-v6ggr\" (UID: \"eb9b4085-2e2d-4955-bbd3-2c53bcada088\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.735235 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-utilities\") pod \"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.735293 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xqpb\" (UniqueName: \"kubernetes.io/projected/8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8-kube-api-access-7xqpb\") pod \"nova-operator-controller-manager-57bb74c7bf-7m9t5\" (UID: \"8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.735310 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-catalog-content\") pod \"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " 
pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.735342 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjbgb\" (UniqueName: \"kubernetes.io/projected/ff2bb528-f133-456a-9e91-5f4ef07a4f2f-kube-api-access-sjbgb\") pod \"mariadb-operator-controller-manager-5777b4f897-6xkm2\" (UID: \"ff2bb528-f133-456a-9e91-5f4ef07a4f2f\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.736659 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-utilities\") pod \"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.737571 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-catalog-content\") pod \"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.753986 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.755071 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.762996 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.763238 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-68wk8" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.792269 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.803188 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.812634 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.813575 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.814198 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzdfs\" (UniqueName: \"kubernetes.io/projected/eb9b4085-2e2d-4955-bbd3-2c53bcada088-kube-api-access-fzdfs\") pod \"manila-operator-controller-manager-59578bc799-v6ggr\" (UID: \"eb9b4085-2e2d-4955-bbd3-2c53bcada088\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.833208 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjbgb\" (UniqueName: \"kubernetes.io/projected/ff2bb528-f133-456a-9e91-5f4ef07a4f2f-kube-api-access-sjbgb\") pod \"mariadb-operator-controller-manager-5777b4f897-6xkm2\" (UID: \"ff2bb528-f133-456a-9e91-5f4ef07a4f2f\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.839647 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-zlwtb" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.840295 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q\" (UID: \"d39bfd53-3ae2-4fe1-a07e-9592be7062b6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.840353 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbph9\" (UniqueName: \"kubernetes.io/projected/954d30ae-2fcd-4d29-8d44-a1cf40b56f27-kube-api-access-cbph9\") pod \"neutron-operator-controller-manager-797d478b46-xzgn5\" (UID: \"954d30ae-2fcd-4d29-8d44-a1cf40b56f27\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.840392 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tv9b\" (UniqueName: \"kubernetes.io/projected/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-kube-api-access-5tv9b\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q\" (UID: \"d39bfd53-3ae2-4fe1-a07e-9592be7062b6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.840413 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xqpb\" (UniqueName: \"kubernetes.io/projected/8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8-kube-api-access-7xqpb\") pod \"nova-operator-controller-manager-57bb74c7bf-7m9t5\" (UID: \"8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.840438 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqpdz\" (UniqueName: \"kubernetes.io/projected/6ae873de-e4da-48cc-9c55-143f61cdf190-kube-api-access-cqpdz\") pod \"octavia-operator-controller-manager-6d7c7ddf95-x9rm6\" (UID: \"6ae873de-e4da-48cc-9c55-143f61cdf190\") " 
pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.849348 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df4fj\" (UniqueName: \"kubernetes.io/projected/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-kube-api-access-df4fj\") pod \"redhat-marketplace-p7fld\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.849396 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.850428 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.854204 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.864300 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.864730 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-z5sqr" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.884921 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.898014 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.913578 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xqpb\" (UniqueName: \"kubernetes.io/projected/8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8-kube-api-access-7xqpb\") pod \"nova-operator-controller-manager-57bb74c7bf-7m9t5\" (UID: \"8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.914011 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.923655 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbph9\" (UniqueName: \"kubernetes.io/projected/954d30ae-2fcd-4d29-8d44-a1cf40b56f27-kube-api-access-cbph9\") pod \"neutron-operator-controller-manager-797d478b46-xzgn5\" (UID: \"954d30ae-2fcd-4d29-8d44-a1cf40b56f27\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.968924 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.972037 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lffh8\" (UniqueName: \"kubernetes.io/projected/155009c1-92c2-493c-8969-12710fed4ec0-kube-api-access-lffh8\") pod \"ovn-operator-controller-manager-6f96f8c84-vv8xc\" (UID: \"155009c1-92c2-493c-8969-12710fed4ec0\") " pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.972113 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5djxg\" (UniqueName: \"kubernetes.io/projected/60dd68e0-dc15-4515-aab8-91f2cbd44487-kube-api-access-5djxg\") pod \"placement-operator-controller-manager-664664cb68-rhpjg\" (UID: \"60dd68e0-dc15-4515-aab8-91f2cbd44487\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.972152 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tv9b\" (UniqueName: \"kubernetes.io/projected/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-kube-api-access-5tv9b\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q\" (UID: \"d39bfd53-3ae2-4fe1-a07e-9592be7062b6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.972185 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqpdz\" (UniqueName: \"kubernetes.io/projected/6ae873de-e4da-48cc-9c55-143f61cdf190-kube-api-access-cqpdz\") pod \"octavia-operator-controller-manager-6d7c7ddf95-x9rm6\" (UID: \"6ae873de-e4da-48cc-9c55-143f61cdf190\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.972252 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q\" (UID: \"d39bfd53-3ae2-4fe1-a07e-9592be7062b6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:25 crc kubenswrapper[4813]: E1007 19:31:25.973594 4813 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 07 19:31:25 crc kubenswrapper[4813]: E1007 19:31:25.974292 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert podName:d39bfd53-3ae2-4fe1-a07e-9592be7062b6 nodeName:}" failed. No retries permitted until 2025-10-07 19:31:26.474267652 +0000 UTC m=+812.552523263 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert") pod "openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" (UID: "d39bfd53-3ae2-4fe1-a07e-9592be7062b6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.981760 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.986988 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx"] Oct 07 19:31:25 crc kubenswrapper[4813]: I1007 19:31:25.991235 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.001231 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-grxcf" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.043062 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.046104 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-775776c574-s879n"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.047718 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqpdz\" (UniqueName: \"kubernetes.io/projected/6ae873de-e4da-48cc-9c55-143f61cdf190-kube-api-access-cqpdz\") pod \"octavia-operator-controller-manager-6d7c7ddf95-x9rm6\" (UID: \"6ae873de-e4da-48cc-9c55-143f61cdf190\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.060074 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.063933 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-tbqh9" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.069429 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tv9b\" (UniqueName: \"kubernetes.io/projected/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-kube-api-access-5tv9b\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q\" (UID: \"d39bfd53-3ae2-4fe1-a07e-9592be7062b6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.078075 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lffh8\" (UniqueName: \"kubernetes.io/projected/155009c1-92c2-493c-8969-12710fed4ec0-kube-api-access-lffh8\") pod \"ovn-operator-controller-manager-6f96f8c84-vv8xc\" (UID: \"155009c1-92c2-493c-8969-12710fed4ec0\") " pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.078160 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5djxg\" (UniqueName: \"kubernetes.io/projected/60dd68e0-dc15-4515-aab8-91f2cbd44487-kube-api-access-5djxg\") pod \"placement-operator-controller-manager-664664cb68-rhpjg\" (UID: \"60dd68e0-dc15-4515-aab8-91f2cbd44487\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.078249 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/72b45aa2-2bd2-4339-8a89-5a2910798969-cert\") pod \"infra-operator-controller-manager-585fc5b659-gngt9\" (UID: \"72b45aa2-2bd2-4339-8a89-5a2910798969\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.078283 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bm95\" (UniqueName: \"kubernetes.io/projected/7f9b49af-fca0-48b3-8291-db67e1597599-kube-api-access-4bm95\") pod \"swift-operator-controller-manager-5f4d5dfdc6-j2xrx\" (UID: \"7f9b49af-fca0-48b3-8291-db67e1597599\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.106847 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/72b45aa2-2bd2-4339-8a89-5a2910798969-cert\") pod \"infra-operator-controller-manager-585fc5b659-gngt9\" (UID: \"72b45aa2-2bd2-4339-8a89-5a2910798969\") " pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.113638 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.120253 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-775776c574-s879n"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.125261 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.128550 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.129781 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.141491 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-8gdhl" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.156115 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lffh8\" (UniqueName: \"kubernetes.io/projected/155009c1-92c2-493c-8969-12710fed4ec0-kube-api-access-lffh8\") pod \"ovn-operator-controller-manager-6f96f8c84-vv8xc\" (UID: \"155009c1-92c2-493c-8969-12710fed4ec0\") " pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.160740 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.174656 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5djxg\" (UniqueName: \"kubernetes.io/projected/60dd68e0-dc15-4515-aab8-91f2cbd44487-kube-api-access-5djxg\") pod \"placement-operator-controller-manager-664664cb68-rhpjg\" (UID: \"60dd68e0-dc15-4515-aab8-91f2cbd44487\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.189001 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bm95\" (UniqueName: \"kubernetes.io/projected/7f9b49af-fca0-48b3-8291-db67e1597599-kube-api-access-4bm95\") pod \"swift-operator-controller-manager-5f4d5dfdc6-j2xrx\" (UID: \"7f9b49af-fca0-48b3-8291-db67e1597599\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.189086 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w2hw\" (UniqueName: \"kubernetes.io/projected/edbe78c4-559a-4296-a16d-37c92634c84f-kube-api-access-4w2hw\") pod \"telemetry-operator-controller-manager-775776c574-s879n\" (UID: \"edbe78c4-559a-4296-a16d-37c92634c84f\") " pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.190143 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.193706 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.201661 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.212691 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-vx84t" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.212697 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.219799 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bm95\" (UniqueName: \"kubernetes.io/projected/7f9b49af-fca0-48b3-8291-db67e1597599-kube-api-access-4bm95\") pod \"swift-operator-controller-manager-5f4d5dfdc6-j2xrx\" (UID: \"7f9b49af-fca0-48b3-8291-db67e1597599\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.225773 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.290853 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4htr\" (UniqueName: \"kubernetes.io/projected/4ced110c-65fb-4a77-aa0a-1a999a911ec1-kube-api-access-p4htr\") pod \"watcher-operator-controller-manager-5dd4499c96-nhkpb\" (UID: \"4ced110c-65fb-4a77-aa0a-1a999a911ec1\") " pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.290911 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w2hw\" (UniqueName: \"kubernetes.io/projected/edbe78c4-559a-4296-a16d-37c92634c84f-kube-api-access-4w2hw\") pod \"telemetry-operator-controller-manager-775776c574-s879n\" (UID: \"edbe78c4-559a-4296-a16d-37c92634c84f\") " pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.290964 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nl5f\" (UniqueName: \"kubernetes.io/projected/ceb59888-cd38-4300-93ea-d8f00d0b3b6c-kube-api-access-6nl5f\") pod \"test-operator-controller-manager-74665f6cdc-5p9hd\" (UID: \"ceb59888-cd38-4300-93ea-d8f00d0b3b6c\") " pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.335347 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w2hw\" (UniqueName: \"kubernetes.io/projected/edbe78c4-559a-4296-a16d-37c92634c84f-kube-api-access-4w2hw\") pod \"telemetry-operator-controller-manager-775776c574-s879n\" (UID: \"edbe78c4-559a-4296-a16d-37c92634c84f\") " pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.393593 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.394257 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4htr\" (UniqueName: \"kubernetes.io/projected/4ced110c-65fb-4a77-aa0a-1a999a911ec1-kube-api-access-p4htr\") pod \"watcher-operator-controller-manager-5dd4499c96-nhkpb\" (UID: \"4ced110c-65fb-4a77-aa0a-1a999a911ec1\") " pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.394368 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nl5f\" (UniqueName: \"kubernetes.io/projected/ceb59888-cd38-4300-93ea-d8f00d0b3b6c-kube-api-access-6nl5f\") pod \"test-operator-controller-manager-74665f6cdc-5p9hd\" (UID: \"ceb59888-cd38-4300-93ea-d8f00d0b3b6c\") " pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.394714 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.416625 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.440972 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4htr\" (UniqueName: \"kubernetes.io/projected/4ced110c-65fb-4a77-aa0a-1a999a911ec1-kube-api-access-p4htr\") pod \"watcher-operator-controller-manager-5dd4499c96-nhkpb\" (UID: \"4ced110c-65fb-4a77-aa0a-1a999a911ec1\") " pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.441239 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.444930 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nl5f\" (UniqueName: \"kubernetes.io/projected/ceb59888-cd38-4300-93ea-d8f00d0b3b6c-kube-api-access-6nl5f\") pod \"test-operator-controller-manager-74665f6cdc-5p9hd\" (UID: \"ceb59888-cd38-4300-93ea-d8f00d0b3b6c\") " pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.446122 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.447545 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.475507 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.479919 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-r6kb6" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.497086 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgs79\" (UniqueName: \"kubernetes.io/projected/28f57161-1102-46a9-99a0-67fc1fc2ca33-kube-api-access-cgs79\") pod \"openstack-operator-controller-manager-6589b7f7cf-b6lcq\" (UID: \"28f57161-1102-46a9-99a0-67fc1fc2ca33\") " pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.497222 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert\") pod \"openstack-operator-controller-manager-6589b7f7cf-b6lcq\" (UID: \"28f57161-1102-46a9-99a0-67fc1fc2ca33\") " pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.497302 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q\" (UID: \"d39bfd53-3ae2-4fe1-a07e-9592be7062b6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:26 crc kubenswrapper[4813]: E1007 19:31:26.497513 4813 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 07 19:31:26 crc kubenswrapper[4813]: E1007 19:31:26.497608 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert podName:d39bfd53-3ae2-4fe1-a07e-9592be7062b6 nodeName:}" failed. No retries permitted until 2025-10-07 19:31:27.497594484 +0000 UTC m=+813.575850085 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert") pod "openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" (UID: "d39bfd53-3ae2-4fe1-a07e-9592be7062b6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.506128 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.561593 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.566025 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.608309 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgs79\" (UniqueName: \"kubernetes.io/projected/28f57161-1102-46a9-99a0-67fc1fc2ca33-kube-api-access-cgs79\") pod \"openstack-operator-controller-manager-6589b7f7cf-b6lcq\" (UID: \"28f57161-1102-46a9-99a0-67fc1fc2ca33\") " pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.608671 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert\") pod \"openstack-operator-controller-manager-6589b7f7cf-b6lcq\" (UID: \"28f57161-1102-46a9-99a0-67fc1fc2ca33\") " pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:26 crc kubenswrapper[4813]: E1007 19:31:26.610369 4813 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 07 19:31:26 crc kubenswrapper[4813]: E1007 19:31:26.610422 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert podName:28f57161-1102-46a9-99a0-67fc1fc2ca33 nodeName:}" failed. No retries permitted until 2025-10-07 19:31:27.110392718 +0000 UTC m=+813.188648329 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert") pod "openstack-operator-controller-manager-6589b7f7cf-b6lcq" (UID: "28f57161-1102-46a9-99a0-67fc1fc2ca33") : secret "webhook-server-cert" not found Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.651856 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgs79\" (UniqueName: \"kubernetes.io/projected/28f57161-1102-46a9-99a0-67fc1fc2ca33-kube-api-access-cgs79\") pod \"openstack-operator-controller-manager-6589b7f7cf-b6lcq\" (UID: \"28f57161-1102-46a9-99a0-67fc1fc2ca33\") " pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.713103 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.713778 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.713797 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.713867 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.718741 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.724169 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lprhc\" (UniqueName: \"kubernetes.io/projected/58a86259-bcad-428f-9d1d-5e8c059403a8-kube-api-access-lprhc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5\" (UID: \"58a86259-bcad-428f-9d1d-5e8c059403a8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.725091 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-ps2d5" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.740194 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.827622 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lprhc\" (UniqueName: \"kubernetes.io/projected/58a86259-bcad-428f-9d1d-5e8c059403a8-kube-api-access-lprhc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5\" (UID: \"58a86259-bcad-428f-9d1d-5e8c059403a8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.850111 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs"] Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.857086 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lprhc\" (UniqueName: \"kubernetes.io/projected/58a86259-bcad-428f-9d1d-5e8c059403a8-kube-api-access-lprhc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5\" (UID: \"58a86259-bcad-428f-9d1d-5e8c059403a8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" Oct 07 19:31:26 crc kubenswrapper[4813]: I1007 19:31:26.883929 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc"] Oct 07 19:31:26 crc kubenswrapper[4813]: W1007 19:31:26.887731 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode90691e1_eed5_4c60_af67_46cfca160910.slice/crio-0a72589e63a042f0f5ec7fbc7e0371398dcae74012a6bf6b8bac5d3809e03c73 WatchSource:0}: Error finding container 0a72589e63a042f0f5ec7fbc7e0371398dcae74012a6bf6b8bac5d3809e03c73: Status 404 returned error can't find the container with id 0a72589e63a042f0f5ec7fbc7e0371398dcae74012a6bf6b8bac5d3809e03c73 Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.100889 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.134498 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert\") pod \"openstack-operator-controller-manager-6589b7f7cf-b6lcq\" (UID: \"28f57161-1102-46a9-99a0-67fc1fc2ca33\") " pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:27 crc kubenswrapper[4813]: E1007 19:31:27.134692 4813 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 07 19:31:27 crc kubenswrapper[4813]: E1007 19:31:27.134744 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert podName:28f57161-1102-46a9-99a0-67fc1fc2ca33 nodeName:}" failed. No retries permitted until 2025-10-07 19:31:28.13473047 +0000 UTC m=+814.212986081 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert") pod "openstack-operator-controller-manager-6589b7f7cf-b6lcq" (UID: "28f57161-1102-46a9-99a0-67fc1fc2ca33") : secret "webhook-server-cert" not found Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.430531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" event={"ID":"145ac332-1c3f-4aec-8438-0c3d36ca2c67","Type":"ContainerStarted","Data":"345d6aeac240b3945f3ec527d7dc8beee2ff76ddc8d40d2baa0156c8807cae9c"} Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.434485 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" event={"ID":"0a182939-eba6-4da5-9e36-567b6a2a37c3","Type":"ContainerStarted","Data":"7c52154992e370a0f261afcf93ea5184b619c78948b52544999528315ce02ce9"} Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.443783 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" event={"ID":"66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2","Type":"ContainerStarted","Data":"96de1a5c45eaace91fbc38fb7540411d69bd831ab7578eaba56e377551f8787e"} Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.456009 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" event={"ID":"e90691e1-eed5-4c60-af67-46cfca160910","Type":"ContainerStarted","Data":"0a72589e63a042f0f5ec7fbc7e0371398dcae74012a6bf6b8bac5d3809e03c73"} Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.476237 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" event={"ID":"8ff43feb-7984-4f63-b5b4-ab460e72ddc8","Type":"ContainerStarted","Data":"b42a6f693bec73094033691b592b959a9c28d787c0728d1ddf2f535e37856c3d"} Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.553058 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q\" (UID: \"d39bfd53-3ae2-4fe1-a07e-9592be7062b6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 
19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.580179 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l"] Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.581534 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d39bfd53-3ae2-4fe1-a07e-9592be7062b6-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q\" (UID: \"d39bfd53-3ae2-4fe1-a07e-9592be7062b6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.602975 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7fld"] Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.612795 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2"] Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.743278 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5"] Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.774003 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq"] Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.781260 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5"] Oct 07 19:31:27 crc kubenswrapper[4813]: I1007 19:31:27.783071 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.173277 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert\") pod \"openstack-operator-controller-manager-6589b7f7cf-b6lcq\" (UID: \"28f57161-1102-46a9-99a0-67fc1fc2ca33\") " pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.177225 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.202356 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/28f57161-1102-46a9-99a0-67fc1fc2ca33-cert\") pod \"openstack-operator-controller-manager-6589b7f7cf-b6lcq\" (UID: \"28f57161-1102-46a9-99a0-67fc1fc2ca33\") " pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.250771 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.255768 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.261241 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.284466 4813 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.315197 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.325440 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.328110 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-775776c574-s879n"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.340610 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6"] Oct 07 19:31:28 crc kubenswrapper[4813]: W1007 19:31:28.346555 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f9b49af_fca0_48b3_8291_db67e1597599.slice/crio-ff7ee1986edcd08ad200433c67697968d0ba213b7ba2704299b0c4cd96c7144f WatchSource:0}: Error finding container ff7ee1986edcd08ad200433c67697968d0ba213b7ba2704299b0c4cd96c7144f: Status 404 returned error can't find the container with id ff7ee1986edcd08ad200433c67697968d0ba213b7ba2704299b0c4cd96c7144f Oct 07 19:31:28 crc kubenswrapper[4813]: W1007 19:31:28.350674 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedbe78c4_559a_4296_a16d_37c92634c84f.slice/crio-5c9ce3c2d1ed99311759fb2ebf182f3d5ebb3c5c147fd323d81a0066a979a98f WatchSource:0}: Error finding container 5c9ce3c2d1ed99311759fb2ebf182f3d5ebb3c5c147fd323d81a0066a979a98f: Status 404 returned error can't find the container with id 5c9ce3c2d1ed99311759fb2ebf182f3d5ebb3c5c147fd323d81a0066a979a98f Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.352195 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:28 crc kubenswrapper[4813]: E1007 19:31:28.368881 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:9d26476523320d70d6d457b91663e8c233ed320d77032a7c57a89ce1aedd3931,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4w2hw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-775776c574-s879n_openstack-operators(edbe78c4-559a-4296-a16d-37c92634c84f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 07 19:31:28 crc kubenswrapper[4813]: E1007 19:31:28.408964 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:efa8fb78cffb573d299ffcc7bab1099affd2dbbab222152092b313074306e0a9,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6nl5f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-74665f6cdc-5p9hd_openstack-operators(ceb59888-cd38-4300-93ea-d8f00d0b3b6c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.495054 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" event={"ID":"954d30ae-2fcd-4d29-8d44-a1cf40b56f27","Type":"ContainerStarted","Data":"8f44315455c82239b8fe4295cd90bef9a04ea3ba45c7c11a37848af196e994d1"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.497520 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" event={"ID":"eb9b4085-2e2d-4955-bbd3-2c53bcada088","Type":"ContainerStarted","Data":"4d60228e0543f8e4de69b8e1cf462b0fe8858a79a6ef2f69265807333280e65d"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.499024 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" event={"ID":"ff2bb528-f133-456a-9e91-5f4ef07a4f2f","Type":"ContainerStarted","Data":"26f17782ed8d412986daa7a056319dce0db4d3b42c2f1f454ffc4618608c478c"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.523479 4813 generic.go:334] "Generic (PLEG): container finished" podID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerID="ce25a474474338e180df3a13b18170fecc3e3de8c4391b6745d56b3c49dc9601" exitCode=0 Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.523562 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7fld" event={"ID":"8291dfc0-99c2-4184-bbe8-dcd060f0f69b","Type":"ContainerDied","Data":"ce25a474474338e180df3a13b18170fecc3e3de8c4391b6745d56b3c49dc9601"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.523588 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7fld" 
event={"ID":"8291dfc0-99c2-4184-bbe8-dcd060f0f69b","Type":"ContainerStarted","Data":"1f9f61bf709756d9423f84a6431b6b41d05edfba91ba1292a5b103c39bb7f5bb"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.535018 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.547854 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5"] Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.548783 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" event={"ID":"60dd68e0-dc15-4515-aab8-91f2cbd44487","Type":"ContainerStarted","Data":"23cb1bed0c102f1790e44b4519a33fd077faccd5a05bad0bbfdbc1cd6c365083"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.564898 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" event={"ID":"b487945e-823b-4d95-a1dc-6f7148aa053c","Type":"ContainerStarted","Data":"631831e9dc9abb38a544f3d8ee66626b53c88dd24f4f6c8d8f1821c3b64fe77d"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.581837 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" event={"ID":"0bac4f10-1d47-40aa-b93e-9a0789801e9b","Type":"ContainerStarted","Data":"d12185d0d697a6644bebc3dc0c337aea65a8792daae305eca66eb4e2ea0049a4"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.595787 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" event={"ID":"7f9b49af-fca0-48b3-8291-db67e1597599","Type":"ContainerStarted","Data":"ff7ee1986edcd08ad200433c67697968d0ba213b7ba2704299b0c4cd96c7144f"} Oct 07 19:31:28 crc kubenswrapper[4813]: E1007 19:31:28.622546 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:e4ae07e859166fc5e2cb4f8e0e2c3358b9d2e2d6721a3864d2e0c651d36698ca,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p4htr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-5dd4499c96-nhkpb_openstack-operators(4ced110c-65fb-4a77-aa0a-1a999a911ec1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 07 19:31:28 crc kubenswrapper[4813]: E1007 19:31:28.643842 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lprhc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5_openstack-operators(58a86259-bcad-428f-9d1d-5e8c059403a8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.644552 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" event={"ID":"6ae873de-e4da-48cc-9c55-143f61cdf190","Type":"ContainerStarted","Data":"57d981ef206bba355053d3fc5611a39f95b2d6122ade68314d40eda947794d7a"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.644586 4813 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" event={"ID":"ff4408c4-9269-43c0-8016-520816b8cd5d","Type":"ContainerStarted","Data":"24ed2219b681307d8bbc873fafe927576bb9e3391da49e78f10fd546b84ec1f7"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.644597 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" event={"ID":"ceb59888-cd38-4300-93ea-d8f00d0b3b6c","Type":"ContainerStarted","Data":"c51be1238b54f3b0de72d188fcaec905bbd07df519fb1edd600280accf4482cd"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.644622 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" event={"ID":"8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8","Type":"ContainerStarted","Data":"9270c756ebb8d3b5939d5582114a86de2054ae5faf0afd86a85466eab445e940"} Oct 07 19:31:28 crc kubenswrapper[4813]: E1007 19:31:28.644913 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" podUID="58a86259-bcad-428f-9d1d-5e8c059403a8" Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.645553 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" event={"ID":"72b45aa2-2bd2-4339-8a89-5a2910798969","Type":"ContainerStarted","Data":"f14216d7ea57c3af64fd5df890f93ec4a3a7b9ba1a7eaeee9ebd928c21a3bb2e"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.646958 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" event={"ID":"155009c1-92c2-493c-8969-12710fed4ec0","Type":"ContainerStarted","Data":"1b671cd6a65238ad18dd2bcd7ac773a4f0b2c5100f5b4fead997c5e667aaa08a"} Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.648615 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" event={"ID":"edbe78c4-559a-4296-a16d-37c92634c84f","Type":"ContainerStarted","Data":"5c9ce3c2d1ed99311759fb2ebf182f3d5ebb3c5c147fd323d81a0066a979a98f"} Oct 07 19:31:28 crc kubenswrapper[4813]: E1007 19:31:28.666366 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" podUID="edbe78c4-559a-4296-a16d-37c92634c84f" Oct 07 19:31:28 crc kubenswrapper[4813]: E1007 19:31:28.723742 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" podUID="ceb59888-cd38-4300-93ea-d8f00d0b3b6c" Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.741825 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q"] Oct 07 19:31:28 crc kubenswrapper[4813]: W1007 19:31:28.844908 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd39bfd53_3ae2_4fe1_a07e_9592be7062b6.slice/crio-ecde57d49c1345805d1aafcb5fa2d6762277a162f638b25730e86741ad997cf3 WatchSource:0}: Error finding container 
ecde57d49c1345805d1aafcb5fa2d6762277a162f638b25730e86741ad997cf3: Status 404 returned error can't find the container with id ecde57d49c1345805d1aafcb5fa2d6762277a162f638b25730e86741ad997cf3 Oct 07 19:31:28 crc kubenswrapper[4813]: E1007 19:31:28.918639 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" podUID="4ced110c-65fb-4a77-aa0a-1a999a911ec1" Oct 07 19:31:28 crc kubenswrapper[4813]: I1007 19:31:28.955054 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq"] Oct 07 19:31:29 crc kubenswrapper[4813]: W1007 19:31:29.013588 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28f57161_1102_46a9_99a0_67fc1fc2ca33.slice/crio-7f345832e83dd75a3444709cb01620b852d413c8a5487ccae15c972871b7dfde WatchSource:0}: Error finding container 7f345832e83dd75a3444709cb01620b852d413c8a5487ccae15c972871b7dfde: Status 404 returned error can't find the container with id 7f345832e83dd75a3444709cb01620b852d413c8a5487ccae15c972871b7dfde Oct 07 19:31:29 crc kubenswrapper[4813]: I1007 19:31:29.703521 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" event={"ID":"58a86259-bcad-428f-9d1d-5e8c059403a8","Type":"ContainerStarted","Data":"6ab2e07276fe4fe87106cbd513494913113c81819809b6507285220091c29e32"} Oct 07 19:31:29 crc kubenswrapper[4813]: E1007 19:31:29.716417 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" podUID="58a86259-bcad-428f-9d1d-5e8c059403a8" Oct 07 19:31:29 crc kubenswrapper[4813]: I1007 19:31:29.718486 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" event={"ID":"edbe78c4-559a-4296-a16d-37c92634c84f","Type":"ContainerStarted","Data":"02d05785ed90d657641ffd08c4ba38d4ce3d2e738f3f9a916d8c719fdaf35a69"} Oct 07 19:31:29 crc kubenswrapper[4813]: I1007 19:31:29.728663 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" event={"ID":"d39bfd53-3ae2-4fe1-a07e-9592be7062b6","Type":"ContainerStarted","Data":"ecde57d49c1345805d1aafcb5fa2d6762277a162f638b25730e86741ad997cf3"} Oct 07 19:31:29 crc kubenswrapper[4813]: E1007 19:31:29.729025 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:9d26476523320d70d6d457b91663e8c233ed320d77032a7c57a89ce1aedd3931\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" podUID="edbe78c4-559a-4296-a16d-37c92634c84f" Oct 07 19:31:29 crc kubenswrapper[4813]: I1007 19:31:29.771367 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" 
event={"ID":"28f57161-1102-46a9-99a0-67fc1fc2ca33","Type":"ContainerStarted","Data":"6d84cb37aaae6ffd37d912f7bf3894145370d37c43444d60f0076e7836bc2af3"} Oct 07 19:31:29 crc kubenswrapper[4813]: I1007 19:31:29.771515 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" event={"ID":"28f57161-1102-46a9-99a0-67fc1fc2ca33","Type":"ContainerStarted","Data":"7f345832e83dd75a3444709cb01620b852d413c8a5487ccae15c972871b7dfde"} Oct 07 19:31:29 crc kubenswrapper[4813]: I1007 19:31:29.864141 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" event={"ID":"ceb59888-cd38-4300-93ea-d8f00d0b3b6c","Type":"ContainerStarted","Data":"7378cf10a317cebbce3fca0a4407e3bceb7c627f1581144b2e02fffaccc44700"} Oct 07 19:31:29 crc kubenswrapper[4813]: E1007 19:31:29.880342 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:efa8fb78cffb573d299ffcc7bab1099affd2dbbab222152092b313074306e0a9\\\"\"" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" podUID="ceb59888-cd38-4300-93ea-d8f00d0b3b6c" Oct 07 19:31:29 crc kubenswrapper[4813]: I1007 19:31:29.896878 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" event={"ID":"4ced110c-65fb-4a77-aa0a-1a999a911ec1","Type":"ContainerStarted","Data":"7678ee93f6d2e21e8de5a8c2109a929fc2a5fa7c1132ff6a2cb4d5131b3659d4"} Oct 07 19:31:29 crc kubenswrapper[4813]: I1007 19:31:29.896937 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" event={"ID":"4ced110c-65fb-4a77-aa0a-1a999a911ec1","Type":"ContainerStarted","Data":"7ee7585895ae90d3d04a6d92caeeefc1876221d6295322470bdf9944bc54c269"} Oct 07 19:31:29 crc kubenswrapper[4813]: E1007 19:31:29.912525 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:e4ae07e859166fc5e2cb4f8e0e2c3358b9d2e2d6721a3864d2e0c651d36698ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" podUID="4ced110c-65fb-4a77-aa0a-1a999a911ec1" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.704623 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wl725"] Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.706098 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.717400 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wl725"] Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.731568 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-catalog-content\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.731624 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-utilities\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.731720 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhgtq\" (UniqueName: \"kubernetes.io/projected/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-kube-api-access-hhgtq\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.835506 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-utilities\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.835595 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhgtq\" (UniqueName: \"kubernetes.io/projected/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-kube-api-access-hhgtq\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.835648 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-catalog-content\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.835959 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-utilities\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.836023 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-catalog-content\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.870866 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hhgtq\" (UniqueName: \"kubernetes.io/projected/6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb-kube-api-access-hhgtq\") pod \"community-operators-wl725\" (UID: \"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb\") " pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.943594 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" event={"ID":"28f57161-1102-46a9-99a0-67fc1fc2ca33","Type":"ContainerStarted","Data":"968d1a9deb7f7130110938c39f09f38782b596a18d71cc31ac388ec22f838a22"} Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.944514 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.962134 4813 generic.go:334] "Generic (PLEG): container finished" podID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerID="41715f908d914978b95bd91442f559b5a5bec259f70e2c705b0c0f9a7f22c8a7" exitCode=0 Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.963709 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7fld" event={"ID":"8291dfc0-99c2-4184-bbe8-dcd060f0f69b","Type":"ContainerDied","Data":"41715f908d914978b95bd91442f559b5a5bec259f70e2c705b0c0f9a7f22c8a7"} Oct 07 19:31:30 crc kubenswrapper[4813]: E1007 19:31:30.974482 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:e4ae07e859166fc5e2cb4f8e0e2c3358b9d2e2d6721a3864d2e0c651d36698ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" podUID="4ced110c-65fb-4a77-aa0a-1a999a911ec1" Oct 07 19:31:30 crc kubenswrapper[4813]: E1007 19:31:30.981761 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" podUID="58a86259-bcad-428f-9d1d-5e8c059403a8" Oct 07 19:31:30 crc kubenswrapper[4813]: E1007 19:31:30.982473 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:9d26476523320d70d6d457b91663e8c233ed320d77032a7c57a89ce1aedd3931\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" podUID="edbe78c4-559a-4296-a16d-37c92634c84f" Oct 07 19:31:30 crc kubenswrapper[4813]: E1007 19:31:30.982762 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:efa8fb78cffb573d299ffcc7bab1099affd2dbbab222152092b313074306e0a9\\\"\"" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" podUID="ceb59888-cd38-4300-93ea-d8f00d0b3b6c" Oct 07 19:31:30 crc kubenswrapper[4813]: I1007 19:31:30.987385 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" podStartSLOduration=4.987367751 podStartE2EDuration="4.987367751s" podCreationTimestamp="2025-10-07 19:31:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:31:30.986533676 +0000 UTC m=+817.064789287" watchObservedRunningTime="2025-10-07 19:31:30.987367751 +0000 UTC m=+817.065623382" Oct 07 19:31:31 crc kubenswrapper[4813]: I1007 19:31:31.045778 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wl725" Oct 07 19:31:31 crc kubenswrapper[4813]: I1007 19:31:31.803028 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wl725"] Oct 07 19:31:31 crc kubenswrapper[4813]: W1007 19:31:31.884341 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a5eb2de_dec2_49bf_a6f6_bd4fd672afdb.slice/crio-3205c40af39c0cf90c04907a10ea1ac0426c54306175af827fbbcd07b3f9f2af WatchSource:0}: Error finding container 3205c40af39c0cf90c04907a10ea1ac0426c54306175af827fbbcd07b3f9f2af: Status 404 returned error can't find the container with id 3205c40af39c0cf90c04907a10ea1ac0426c54306175af827fbbcd07b3f9f2af Oct 07 19:31:31 crc kubenswrapper[4813]: I1007 19:31:31.991811 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wl725" event={"ID":"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb","Type":"ContainerStarted","Data":"3205c40af39c0cf90c04907a10ea1ac0426c54306175af827fbbcd07b3f9f2af"} Oct 07 19:31:33 crc kubenswrapper[4813]: I1007 19:31:33.003757 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7fld" event={"ID":"8291dfc0-99c2-4184-bbe8-dcd060f0f69b","Type":"ContainerStarted","Data":"11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0"} Oct 07 19:31:33 crc kubenswrapper[4813]: I1007 19:31:33.010986 4813 generic.go:334] "Generic (PLEG): container finished" podID="6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb" containerID="ce49847acec198af7bbc6e41e34ca5259b822a33c93e61fe2d30c4845e556d8b" exitCode=0 Oct 07 19:31:33 crc kubenswrapper[4813]: I1007 19:31:33.011502 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wl725" event={"ID":"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb","Type":"ContainerDied","Data":"ce49847acec198af7bbc6e41e34ca5259b822a33c93e61fe2d30c4845e556d8b"} Oct 07 19:31:33 crc kubenswrapper[4813]: I1007 19:31:33.029785 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p7fld" podStartSLOduration=4.8301818789999995 podStartE2EDuration="8.029761642s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.542526861 +0000 UTC m=+814.620782462" lastFinishedPulling="2025-10-07 19:31:31.742106614 +0000 UTC m=+817.820362225" observedRunningTime="2025-10-07 19:31:33.029442753 +0000 UTC m=+819.107698364" watchObservedRunningTime="2025-10-07 19:31:33.029761642 +0000 UTC m=+819.108017253" Oct 07 19:31:35 crc kubenswrapper[4813]: I1007 19:31:35.983273 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:35 crc kubenswrapper[4813]: I1007 19:31:35.983629 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:36 crc kubenswrapper[4813]: I1007 19:31:36.026707 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:38 crc kubenswrapper[4813]: I1007 19:31:38.363032 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6589b7f7cf-b6lcq" Oct 07 19:31:38 crc kubenswrapper[4813]: I1007 19:31:38.867142 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-w89rt"] Oct 07 19:31:38 crc kubenswrapper[4813]: I1007 19:31:38.868611 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:38 crc kubenswrapper[4813]: I1007 19:31:38.876486 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w89rt"] Oct 07 19:31:38 crc kubenswrapper[4813]: I1007 19:31:38.975164 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rgkn\" (UniqueName: \"kubernetes.io/projected/bc9aee39-39c8-4956-b050-8bf23e5617cf-kube-api-access-7rgkn\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:38 crc kubenswrapper[4813]: I1007 19:31:38.975234 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-catalog-content\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:38 crc kubenswrapper[4813]: I1007 19:31:38.975290 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-utilities\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:39 crc kubenswrapper[4813]: I1007 19:31:39.076863 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-catalog-content\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:39 crc kubenswrapper[4813]: I1007 19:31:39.076937 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-utilities\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:39 crc kubenswrapper[4813]: I1007 19:31:39.077009 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rgkn\" (UniqueName: \"kubernetes.io/projected/bc9aee39-39c8-4956-b050-8bf23e5617cf-kube-api-access-7rgkn\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:39 crc kubenswrapper[4813]: I1007 19:31:39.077791 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-catalog-content\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:39 crc kubenswrapper[4813]: I1007 19:31:39.078092 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-utilities\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:39 crc kubenswrapper[4813]: I1007 19:31:39.108543 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rgkn\" (UniqueName: \"kubernetes.io/projected/bc9aee39-39c8-4956-b050-8bf23e5617cf-kube-api-access-7rgkn\") pod \"certified-operators-w89rt\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:39 crc kubenswrapper[4813]: I1007 19:31:39.200505 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:31:43 crc kubenswrapper[4813]: E1007 19:31:43.488070 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:73736f216f886549901fbcfc823b072f73691c9a79ec79e59d100e992b9c1e34" Oct 07 19:31:43 crc kubenswrapper[4813]: E1007 19:31:43.489098 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:73736f216f886549901fbcfc823b072f73691c9a79ec79e59d100e992b9c1e34,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pz5h2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-687df44cdb-ndtgw_openstack-operators(145ac332-1c3f-4aec-8438-0c3d36ca2c67): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:44 crc kubenswrapper[4813]: E1007 19:31:44.214484 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492" Oct 07 19:31:44 crc kubenswrapper[4813]: E1007 19:31:44.214663 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mnwdp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-585fc5b659-gngt9_openstack-operators(72b45aa2-2bd2-4339-8a89-5a2910798969): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:44 crc kubenswrapper[4813]: E1007 19:31:44.667911 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:ee05f2b06405240a8fcdbd430a9e8983b4667f372548334307b68c154e389960" Oct 07 19:31:44 crc kubenswrapper[4813]: E1007 19:31:44.668207 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:ee05f2b06405240a8fcdbd430a9e8983b4667f372548334307b68c154e389960,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q2l5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-74cb5cbc49-662qq_openstack-operators(0bac4f10-1d47-40aa-b93e-9a0789801e9b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:45 crc kubenswrapper[4813]: E1007 19:31:45.236214 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:3cc6bba71197ddf88dd4ba1301542bacbc1fe12e6faab2b69e6960944b3d74a0" Oct 07 19:31:45 crc kubenswrapper[4813]: E1007 19:31:45.236406 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:3cc6bba71197ddf88dd4ba1301542bacbc1fe12e6faab2b69e6960944b3d74a0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-446s4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-7bb46cd7d-lk6lc_openstack-operators(66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:45 crc kubenswrapper[4813]: E1007 19:31:45.653420 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:79b43a69884631c635d2164b95a2d4ec68f5cb33f96da14764f1c710880f3997" Oct 07 19:31:45 crc kubenswrapper[4813]: E1007 19:31:45.653581 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:79b43a69884631c635d2164b95a2d4ec68f5cb33f96da14764f1c710880f3997,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pn7pn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-ddb98f99b-kjl4l_openstack-operators(b487945e-823b-4d95-a1dc-6f7148aa053c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:46 crc kubenswrapper[4813]: I1007 19:31:46.033069 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:46 crc kubenswrapper[4813]: I1007 19:31:46.071434 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7fld"] Oct 07 19:31:46 crc kubenswrapper[4813]: I1007 19:31:46.089852 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p7fld" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="registry-server" containerID="cri-o://11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0" gracePeriod=2 Oct 07 19:31:46 crc kubenswrapper[4813]: E1007 19:31:46.205692 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d33c1f507e1f5b9a4bf226ad98917e92101ac66b36e19d35cbe04ae7014f6bff" Oct 07 19:31:46 crc kubenswrapper[4813]: E1007 19:31:46.205852 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d33c1f507e1f5b9a4bf226ad98917e92101ac66b36e19d35cbe04ae7014f6bff,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5djxg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-664664cb68-rhpjg_openstack-operators(60dd68e0-dc15-4515-aab8-91f2cbd44487): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:46 crc kubenswrapper[4813]: E1007 19:31:46.708917 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:582f7b1e411961b69f2e3c6b346aa25759b89f7720ed3fade1d363bf5d2dffc8" Oct 07 19:31:46 crc kubenswrapper[4813]: E1007 19:31:46.709059 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:582f7b1e411961b69f2e3c6b346aa25759b89f7720ed3fade1d363bf5d2dffc8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fzdfs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-59578bc799-v6ggr_openstack-operators(eb9b4085-2e2d-4955-bbd3-2c53bcada088): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:47 crc kubenswrapper[4813]: I1007 19:31:47.096258 4813 generic.go:334] "Generic (PLEG): container finished" podID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerID="11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0" exitCode=0 Oct 07 19:31:47 crc kubenswrapper[4813]: I1007 19:31:47.096297 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7fld" event={"ID":"8291dfc0-99c2-4184-bbe8-dcd060f0f69b","Type":"ContainerDied","Data":"11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0"} Oct 07 19:31:47 crc kubenswrapper[4813]: E1007 19:31:47.108496 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2" Oct 07 19:31:47 crc kubenswrapper[4813]: E1007 19:31:47.108709 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cbph9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-797d478b46-xzgn5_openstack-operators(954d30ae-2fcd-4d29-8d44-a1cf40b56f27): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:47 crc kubenswrapper[4813]: E1007 19:31:47.580147 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167" Oct 07 19:31:47 crc kubenswrapper[4813]: E1007 19:31:47.580896 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sjbgb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-5777b4f897-6xkm2_openstack-operators(ff2bb528-f133-456a-9e91-5f4ef07a4f2f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:47 crc kubenswrapper[4813]: E1007 19:31:47.996138 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:b2e9acf568a48c28cf2aed6012e432eeeb7d5f0eb11878fc91b62bc34cba10cd" Oct 07 19:31:47 crc kubenswrapper[4813]: E1007 19:31:47.996403 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:b2e9acf568a48c28cf2aed6012e432eeeb7d5f0eb11878fc91b62bc34cba10cd,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7xqpb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-57bb74c7bf-7m9t5_openstack-operators(8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:48 crc kubenswrapper[4813]: E1007 19:31:48.382235 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:c487a793648e64af2d64df5f6efbda2d4fd586acd7aee6838d3ec2b3edd9efb9" Oct 07 19:31:48 crc kubenswrapper[4813]: E1007 19:31:48.382395 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:c487a793648e64af2d64df5f6efbda2d4fd586acd7aee6838d3ec2b3edd9efb9,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j5vqf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-59cdc64769-m49nk_openstack-operators(8ff43feb-7984-4f63-b5b4-ab460e72ddc8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:48 crc kubenswrapper[4813]: I1007 19:31:48.923187 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nlhtj"] Oct 07 19:31:48 crc kubenswrapper[4813]: I1007 19:31:48.928555 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:48 crc kubenswrapper[4813]: I1007 19:31:48.929510 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nlhtj"] Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.044563 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4mrn\" (UniqueName: \"kubernetes.io/projected/9a89425b-9503-4d37-88f9-1be656146b03-kube-api-access-s4mrn\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.044927 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-catalog-content\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.044959 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-utilities\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.146278 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-catalog-content\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.146357 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-utilities\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 
07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.146432 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4mrn\" (UniqueName: \"kubernetes.io/projected/9a89425b-9503-4d37-88f9-1be656146b03-kube-api-access-s4mrn\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.147144 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-catalog-content\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.147406 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-utilities\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.178579 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4mrn\" (UniqueName: \"kubernetes.io/projected/9a89425b-9503-4d37-88f9-1be656146b03-kube-api-access-s4mrn\") pod \"redhat-operators-nlhtj\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:49 crc kubenswrapper[4813]: I1007 19:31:49.254491 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:31:50 crc kubenswrapper[4813]: E1007 19:31:50.145470 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 07 19:31:50 crc kubenswrapper[4813]: E1007 19:31:50.145654 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hhgtq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-wl725_openshift-marketplace(6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:31:50 crc kubenswrapper[4813]: E1007 19:31:50.146855 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wl725" podUID="6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb" Oct 07 19:31:51 crc kubenswrapper[4813]: E1007 19:31:51.619067 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-wl725" podUID="6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb" Oct 07 19:31:55 crc kubenswrapper[4813]: E1007 19:31:55.983761 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0 is running failed: container process not found" containerID="11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 19:31:55 crc kubenswrapper[4813]: E1007 19:31:55.985181 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0 is running failed: container process not found" containerID="11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 19:31:55 crc kubenswrapper[4813]: E1007 19:31:55.985778 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0 is 
running failed: container process not found" containerID="11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0" cmd=["grpc_health_probe","-addr=:50051"] Oct 07 19:31:55 crc kubenswrapper[4813]: E1007 19:31:55.985832 4813 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-p7fld" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="registry-server" Oct 07 19:31:57 crc kubenswrapper[4813]: E1007 19:31:57.331139 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Oct 07 19:31:57 crc kubenswrapper[4813]: E1007 19:31:57.331494 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lprhc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5_openstack-operators(58a86259-bcad-428f-9d1d-5e8c059403a8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:31:57 crc kubenswrapper[4813]: E1007 19:31:57.333092 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" podUID="58a86259-bcad-428f-9d1d-5e8c059403a8" Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 
19:31:57.509028 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.572410 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-utilities\") pod \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.572472 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-catalog-content\") pod \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.572518 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-df4fj\" (UniqueName: \"kubernetes.io/projected/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-kube-api-access-df4fj\") pod \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\" (UID: \"8291dfc0-99c2-4184-bbe8-dcd060f0f69b\") " Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.573970 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-utilities" (OuterVolumeSpecName: "utilities") pod "8291dfc0-99c2-4184-bbe8-dcd060f0f69b" (UID: "8291dfc0-99c2-4184-bbe8-dcd060f0f69b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.585076 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8291dfc0-99c2-4184-bbe8-dcd060f0f69b" (UID: "8291dfc0-99c2-4184-bbe8-dcd060f0f69b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.591582 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-kube-api-access-df4fj" (OuterVolumeSpecName: "kube-api-access-df4fj") pod "8291dfc0-99c2-4184-bbe8-dcd060f0f69b" (UID: "8291dfc0-99c2-4184-bbe8-dcd060f0f69b"). InnerVolumeSpecName "kube-api-access-df4fj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.675007 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.675039 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.675051 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-df4fj\" (UniqueName: \"kubernetes.io/projected/8291dfc0-99c2-4184-bbe8-dcd060f0f69b-kube-api-access-df4fj\") on node \"crc\" DevicePath \"\"" Oct 07 19:31:57 crc kubenswrapper[4813]: I1007 19:31:57.900160 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w89rt"] Oct 07 19:31:57 crc kubenswrapper[4813]: W1007 19:31:57.951429 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc9aee39_39c8_4956_b050_8bf23e5617cf.slice/crio-bddc19156e76fe06b8c30416f7d17496b74b2e351e494054c054df3e82983524 WatchSource:0}: Error finding container bddc19156e76fe06b8c30416f7d17496b74b2e351e494054c054df3e82983524: Status 404 returned error can't find the container with id bddc19156e76fe06b8c30416f7d17496b74b2e351e494054c054df3e82983524 Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.149139 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nlhtj"] Oct 07 19:31:58 crc kubenswrapper[4813]: W1007 19:31:58.157960 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a89425b_9503_4d37_88f9_1be656146b03.slice/crio-c82782c156f5eb61acaae776dc18f511112020d7b6343f8cec9c6cdec4ea1fbe WatchSource:0}: Error finding container c82782c156f5eb61acaae776dc18f511112020d7b6343f8cec9c6cdec4ea1fbe: Status 404 returned error can't find the container with id c82782c156f5eb61acaae776dc18f511112020d7b6343f8cec9c6cdec4ea1fbe Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.225542 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlhtj" event={"ID":"9a89425b-9503-4d37-88f9-1be656146b03","Type":"ContainerStarted","Data":"c82782c156f5eb61acaae776dc18f511112020d7b6343f8cec9c6cdec4ea1fbe"} Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.228008 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p7fld" event={"ID":"8291dfc0-99c2-4184-bbe8-dcd060f0f69b","Type":"ContainerDied","Data":"1f9f61bf709756d9423f84a6431b6b41d05edfba91ba1292a5b103c39bb7f5bb"} Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.228049 4813 scope.go:117] "RemoveContainer" containerID="11b589577ceea336cb08a0a511526e68581befd05bc78479ebef51a9b6b2beb0" Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.228165 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p7fld" Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.235155 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w89rt" event={"ID":"bc9aee39-39c8-4956-b050-8bf23e5617cf","Type":"ContainerStarted","Data":"bddc19156e76fe06b8c30416f7d17496b74b2e351e494054c054df3e82983524"} Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.267501 4813 scope.go:117] "RemoveContainer" containerID="41715f908d914978b95bd91442f559b5a5bec259f70e2c705b0c0f9a7f22c8a7" Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.271619 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7fld"] Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.279981 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p7fld"] Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.296506 4813 scope.go:117] "RemoveContainer" containerID="ce25a474474338e180df3a13b18170fecc3e3de8c4391b6745d56b3c49dc9601" Oct 07 19:31:58 crc kubenswrapper[4813]: I1007 19:31:58.612353 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" path="/var/lib/kubelet/pods/8291dfc0-99c2-4184-bbe8-dcd060f0f69b/volumes" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.006940 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" podUID="954d30ae-2fcd-4d29-8d44-a1cf40b56f27" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.007610 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" podUID="b487945e-823b-4d95-a1dc-6f7148aa053c" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.172384 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" podUID="72b45aa2-2bd2-4339-8a89-5a2910798969" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.172662 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" podUID="66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.172768 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" podUID="8ff43feb-7984-4f63-b5b4-ab460e72ddc8" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.173021 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" podUID="0bac4f10-1d47-40aa-b93e-9a0789801e9b" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.173148 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" podUID="ff2bb528-f133-456a-9e91-5f4ef07a4f2f" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.173778 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" podUID="eb9b4085-2e2d-4955-bbd3-2c53bcada088" Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.232921 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" podUID="145ac332-1c3f-4aec-8438-0c3d36ca2c67" Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.254812 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" event={"ID":"8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8","Type":"ContainerStarted","Data":"b087e4240d566c7128e7ba759254f634d8f35bd5652051001a0465309ad31c2e"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.256260 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" event={"ID":"0a182939-eba6-4da5-9e36-567b6a2a37c3","Type":"ContainerStarted","Data":"52a46b65f7a99cfc7c8d9853074dbbd10dcc08edf69dc5732c95cd5988c23a68"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.257585 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" event={"ID":"72b45aa2-2bd2-4339-8a89-5a2910798969","Type":"ContainerStarted","Data":"510a91e9e9934f7762be912264de6581a7db6ab3562f4b6f19157b82e47f48b5"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.259942 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" event={"ID":"e90691e1-eed5-4c60-af67-46cfca160910","Type":"ContainerStarted","Data":"a7a4ea27365574f18b8912e42f66297f21b4f36ec53d44bf2071e8238c03f335"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.270020 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" event={"ID":"ceb59888-cd38-4300-93ea-d8f00d0b3b6c","Type":"ContainerStarted","Data":"657dc9ada26ef6b33f1035fb6aa5c130a2e2042dee9ef42a2a8f1c7f5ae94de3"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.270584 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.272927 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" 
event={"ID":"145ac332-1c3f-4aec-8438-0c3d36ca2c67","Type":"ContainerStarted","Data":"ad65da4fa2acf32e3481c6b6ee066c4779d311cb7d4820608e6fbba7da36a2c4"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.288206 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" event={"ID":"edbe78c4-559a-4296-a16d-37c92634c84f","Type":"ContainerStarted","Data":"60bcd25d8800d7d81b6a7adc4f1f18d413232e90d623de2251391d4fbc2304e7"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.289496 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" event={"ID":"60dd68e0-dc15-4515-aab8-91f2cbd44487","Type":"ContainerStarted","Data":"08bc63e74b1001f11dc3fa8b8d3d35d61c5c9008aa2c4d676822a3375469600f"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.296249 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" event={"ID":"0bac4f10-1d47-40aa-b93e-9a0789801e9b","Type":"ContainerStarted","Data":"97b9c050e9e16c550ccacce28050e1786745f7387b523a5fb763bd1c5556828c"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.302659 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" event={"ID":"6ae873de-e4da-48cc-9c55-143f61cdf190","Type":"ContainerStarted","Data":"25f61ed11e2515452935d4e0a40596a47f4382be7e0b6997287ace53aeded2bf"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.306221 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" event={"ID":"ff2bb528-f133-456a-9e91-5f4ef07a4f2f","Type":"ContainerStarted","Data":"7c5cb914e62e3670018b93fd33609e45861ebb5911cf3e0645b27857c54c2acf"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.326375 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" event={"ID":"155009c1-92c2-493c-8969-12710fed4ec0","Type":"ContainerStarted","Data":"b7ca8bcdbd91417b9cad0fb58d3345ec69a6cc897c3b83b5d768c56c8867a251"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.329712 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" event={"ID":"954d30ae-2fcd-4d29-8d44-a1cf40b56f27","Type":"ContainerStarted","Data":"7769d99ee1e0e546fd8388a7d204faecfc7aaaa8a10a7f236dc607399fc49fad"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.339242 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" event={"ID":"ff4408c4-9269-43c0-8016-520816b8cd5d","Type":"ContainerStarted","Data":"97aa528ec0a87bebad1bbbc7d70067738bb759ba555d3c789fffd24d50eb1fb8"} Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.346794 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" podUID="60dd68e0-dc15-4515-aab8-91f2cbd44487" Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.354869 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" 
event={"ID":"4ced110c-65fb-4a77-aa0a-1a999a911ec1","Type":"ContainerStarted","Data":"b4b6738b7175a912f416fe8b5b03beed225d8ecf9caef22f7eff5dedf7b5297c"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.362588 4813 generic.go:334] "Generic (PLEG): container finished" podID="9a89425b-9503-4d37-88f9-1be656146b03" containerID="a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3" exitCode=0 Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.362677 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlhtj" event={"ID":"9a89425b-9503-4d37-88f9-1be656146b03","Type":"ContainerDied","Data":"a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.386384 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" event={"ID":"eb9b4085-2e2d-4955-bbd3-2c53bcada088","Type":"ContainerStarted","Data":"eb63cf24dad3a996168376214674fe11ab4e316e9d27b54bf221cd09ed54be76"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.401868 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" event={"ID":"8ff43feb-7984-4f63-b5b4-ab460e72ddc8","Type":"ContainerStarted","Data":"cf18904910fa4b25cfa6ea8aa3d527fb879851ed54d199f968d5204b9c900e1d"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.451560 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" event={"ID":"7f9b49af-fca0-48b3-8291-db67e1597599","Type":"ContainerStarted","Data":"d8903586a5f908b41d5307ef73ea468f08c5008d788a1fac39d3a9e7f49bbe74"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.503979 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" event={"ID":"66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2","Type":"ContainerStarted","Data":"9bb5a29ac0e669004b9eda904a80350e950534757699a56a6f3310b551d28f9d"} Oct 07 19:32:00 crc kubenswrapper[4813]: E1007 19:32:00.520775 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" podUID="8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8" Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.543104 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" event={"ID":"b487945e-823b-4d95-a1dc-6f7148aa053c","Type":"ContainerStarted","Data":"9c70040407fd690cf68e3a6a0984fb5e3df6b07aa418644e3655f8885463af44"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.571427 4813 generic.go:334] "Generic (PLEG): container finished" podID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerID="2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b" exitCode=0 Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.571556 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w89rt" event={"ID":"bc9aee39-39c8-4956-b050-8bf23e5617cf","Type":"ContainerDied","Data":"2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.581973 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" event={"ID":"d39bfd53-3ae2-4fe1-a07e-9592be7062b6","Type":"ContainerStarted","Data":"c2b38f85715e77f1f421af3c5bd27f5c90c893237874065cacca617ffa353405"} Oct 07 19:32:00 crc kubenswrapper[4813]: I1007 19:32:00.786612 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" podStartSLOduration=6.478723821 podStartE2EDuration="35.786596799s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.406618613 +0000 UTC m=+814.484874224" lastFinishedPulling="2025-10-07 19:31:57.714491601 +0000 UTC m=+843.792747202" observedRunningTime="2025-10-07 19:32:00.786025522 +0000 UTC m=+846.864281133" watchObservedRunningTime="2025-10-07 19:32:00.786596799 +0000 UTC m=+846.864852410" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.629453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" event={"ID":"0a182939-eba6-4da5-9e36-567b6a2a37c3","Type":"ContainerStarted","Data":"616798941fb006b55db3404e59fcbdff701c106a5ed249beac4e8fc164bfcedb"} Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.630427 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.656514 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" event={"ID":"d39bfd53-3ae2-4fe1-a07e-9592be7062b6","Type":"ContainerStarted","Data":"73cb6f66638f7a525399b31de11b1367a26a494a2022f2ab6dbd37a90c3b67bf"} Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.657019 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.659106 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" event={"ID":"7f9b49af-fca0-48b3-8291-db67e1597599","Type":"ContainerStarted","Data":"cd42a484173dbe4a6dc7d1468b599fe709175230e1f95a170975fca8003a772b"} Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.659467 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.668060 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" podStartSLOduration=15.315811438 podStartE2EDuration="36.668046741s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:27.015818005 +0000 UTC m=+813.094073616" lastFinishedPulling="2025-10-07 19:31:48.368053308 +0000 UTC m=+834.446308919" observedRunningTime="2025-10-07 19:32:01.667526456 +0000 UTC m=+847.745782057" watchObservedRunningTime="2025-10-07 19:32:01.668046741 +0000 UTC m=+847.746302342" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.677012 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" 
event={"ID":"0bac4f10-1d47-40aa-b93e-9a0789801e9b","Type":"ContainerStarted","Data":"22d36bb0d406a36d477e12f77e0551fcad2e4091545079204d864d083e5541c1"} Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.677662 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.693542 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" podStartSLOduration=14.536409082 podStartE2EDuration="36.693522792s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.351443322 +0000 UTC m=+814.429698933" lastFinishedPulling="2025-10-07 19:31:50.508557032 +0000 UTC m=+836.586812643" observedRunningTime="2025-10-07 19:32:01.692813061 +0000 UTC m=+847.771068672" watchObservedRunningTime="2025-10-07 19:32:01.693522792 +0000 UTC m=+847.771778413" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.696794 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" event={"ID":"6ae873de-e4da-48cc-9c55-143f61cdf190","Type":"ContainerStarted","Data":"adfaf628d316bee52eb0b873dc327294da710308062ec2e3c1c8d301d9e10668"} Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.697436 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.703239 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" event={"ID":"155009c1-92c2-493c-8969-12710fed4ec0","Type":"ContainerStarted","Data":"24bd10da6cded3dfe59b0c17aa2de74c0f9714382f69329f220b9644afff27af"} Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.704445 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.728253 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" event={"ID":"ff4408c4-9269-43c0-8016-520816b8cd5d","Type":"ContainerStarted","Data":"82701a179568ee461470b8590f09daebf371c4e8d4c2e3fec7f00e31b40b8098"} Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.728960 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.771702 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" event={"ID":"e90691e1-eed5-4c60-af67-46cfca160910","Type":"ContainerStarted","Data":"c4228cb23cd8493ef9fa174c67b361e177f3e3b4f01289304d40a1a65c303aa5"} Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.771737 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.772383 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.772696 4813 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.773248 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" podStartSLOduration=15.555845432 podStartE2EDuration="36.77323215s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.901482553 +0000 UTC m=+814.979738164" lastFinishedPulling="2025-10-07 19:31:50.118869271 +0000 UTC m=+836.197124882" observedRunningTime="2025-10-07 19:32:01.745615296 +0000 UTC m=+847.823870907" watchObservedRunningTime="2025-10-07 19:32:01.77323215 +0000 UTC m=+847.851487761" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.778483 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" podStartSLOduration=3.762210553 podStartE2EDuration="36.778475674s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:27.83419828 +0000 UTC m=+813.912453891" lastFinishedPulling="2025-10-07 19:32:00.850463411 +0000 UTC m=+846.928719012" observedRunningTime="2025-10-07 19:32:01.773219339 +0000 UTC m=+847.851474950" watchObservedRunningTime="2025-10-07 19:32:01.778475674 +0000 UTC m=+847.856731285" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.845154 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" podStartSLOduration=13.594707945 podStartE2EDuration="36.845140198s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.36863871 +0000 UTC m=+814.446894311" lastFinishedPulling="2025-10-07 19:31:51.619070943 +0000 UTC m=+837.697326564" observedRunningTime="2025-10-07 19:32:01.808589521 +0000 UTC m=+847.886845132" watchObservedRunningTime="2025-10-07 19:32:01.845140198 +0000 UTC m=+847.923395809" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.848143 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" podStartSLOduration=14.684351919000001 podStartE2EDuration="36.848135226s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.344674822 +0000 UTC m=+814.422930433" lastFinishedPulling="2025-10-07 19:31:50.508458129 +0000 UTC m=+836.586713740" observedRunningTime="2025-10-07 19:32:01.845076666 +0000 UTC m=+847.923332277" watchObservedRunningTime="2025-10-07 19:32:01.848135226 +0000 UTC m=+847.926390837" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.921209 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" podStartSLOduration=7.5754725910000005 podStartE2EDuration="36.921187128s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.368743833 +0000 UTC m=+814.446999444" lastFinishedPulling="2025-10-07 19:31:57.71445837 +0000 UTC m=+843.792713981" observedRunningTime="2025-10-07 19:32:01.920825137 +0000 UTC m=+847.999080738" watchObservedRunningTime="2025-10-07 19:32:01.921187128 +0000 UTC m=+847.999442739" Oct 07 19:32:01 crc kubenswrapper[4813]: I1007 19:32:01.922157 4813 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" podStartSLOduration=15.119204577 podStartE2EDuration="36.922153026s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.335114319 +0000 UTC m=+814.413369920" lastFinishedPulling="2025-10-07 19:31:50.138062768 +0000 UTC m=+836.216318369" observedRunningTime="2025-10-07 19:32:01.886776594 +0000 UTC m=+847.965032205" watchObservedRunningTime="2025-10-07 19:32:01.922153026 +0000 UTC m=+848.000408637" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.048522 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" podStartSLOduration=13.429282202 podStartE2EDuration="37.048504508s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:26.891356855 +0000 UTC m=+812.969612466" lastFinishedPulling="2025-10-07 19:31:50.510579161 +0000 UTC m=+836.588834772" observedRunningTime="2025-10-07 19:32:02.046616642 +0000 UTC m=+848.124872253" watchObservedRunningTime="2025-10-07 19:32:02.048504508 +0000 UTC m=+848.126760109" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.050540 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" podStartSLOduration=7.957412759 podStartE2EDuration="37.050535578s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.622420303 +0000 UTC m=+814.700675914" lastFinishedPulling="2025-10-07 19:31:57.715543122 +0000 UTC m=+843.793798733" observedRunningTime="2025-10-07 19:32:02.021556284 +0000 UTC m=+848.099811895" watchObservedRunningTime="2025-10-07 19:32:02.050535578 +0000 UTC m=+848.128791189" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.778461 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" event={"ID":"72b45aa2-2bd2-4339-8a89-5a2910798969","Type":"ContainerStarted","Data":"2260278a870726b0e83b0ff8e3929069db1449c53931fa5659aef0d47636931b"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.778617 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.780833 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" event={"ID":"66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2","Type":"ContainerStarted","Data":"75454aa20796a80f594488e55136c3029f69b0ee10f30fda38a909054f34a9a6"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.780913 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.783783 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" event={"ID":"8ff43feb-7984-4f63-b5b4-ab460e72ddc8","Type":"ContainerStarted","Data":"aea41dc5e1f8ee6d245fc0a000e7df6787a0f712b0d6723678aa7bd90d9c3438"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.784422 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" Oct 07 19:32:02 crc 
kubenswrapper[4813]: I1007 19:32:02.786951 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" event={"ID":"ff2bb528-f133-456a-9e91-5f4ef07a4f2f","Type":"ContainerStarted","Data":"c43b48c92a7c8bdd6e0be016e0bed1d3289d09bfb76ef304f102cfed66b56c17"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.787069 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.789100 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" event={"ID":"954d30ae-2fcd-4d29-8d44-a1cf40b56f27","Type":"ContainerStarted","Data":"5921048b704b468f6f921ff86e42aa7b91cff7dfdb0dffca4d36f56eb9a40721"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.789227 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.792713 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlhtj" event={"ID":"9a89425b-9503-4d37-88f9-1be656146b03","Type":"ContainerStarted","Data":"3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.795214 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" event={"ID":"145ac332-1c3f-4aec-8438-0c3d36ca2c67","Type":"ContainerStarted","Data":"5fd4faac752584e776b4f5a7d2cd977f97291c972e2144d46d746979b9db06f8"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.795333 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.797623 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" event={"ID":"b487945e-823b-4d95-a1dc-6f7148aa053c","Type":"ContainerStarted","Data":"26e7485de360608dc56f9bcf38be66e86489072ca0bb804c88da4fb124adb729"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.797737 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.799526 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w89rt" event={"ID":"bc9aee39-39c8-4956-b050-8bf23e5617cf","Type":"ContainerStarted","Data":"8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.801090 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" event={"ID":"eb9b4085-2e2d-4955-bbd3-2c53bcada088","Type":"ContainerStarted","Data":"5fd50adaf3541c38258c4b37b246c36ad7634036f169cd5328cafc690d4de689"} Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.802057 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.820389 4813 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" podStartSLOduration=4.801231112 podStartE2EDuration="37.820374303s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.327278667 +0000 UTC m=+814.405534278" lastFinishedPulling="2025-10-07 19:32:01.346421858 +0000 UTC m=+847.424677469" observedRunningTime="2025-10-07 19:32:02.813782889 +0000 UTC m=+848.892038500" watchObservedRunningTime="2025-10-07 19:32:02.820374303 +0000 UTC m=+848.898629914" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.841885 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" podStartSLOduration=3.385865155 podStartE2EDuration="37.841865837s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:26.887023987 +0000 UTC m=+812.965279598" lastFinishedPulling="2025-10-07 19:32:01.343024669 +0000 UTC m=+847.421280280" observedRunningTime="2025-10-07 19:32:02.839540858 +0000 UTC m=+848.917796469" watchObservedRunningTime="2025-10-07 19:32:02.841865837 +0000 UTC m=+848.920121448" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.870436 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" podStartSLOduration=4.453629459 podStartE2EDuration="37.870419617s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:27.742647123 +0000 UTC m=+813.820902734" lastFinishedPulling="2025-10-07 19:32:01.159437281 +0000 UTC m=+847.237692892" observedRunningTime="2025-10-07 19:32:02.865541834 +0000 UTC m=+848.943797445" watchObservedRunningTime="2025-10-07 19:32:02.870419617 +0000 UTC m=+848.948675228" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.931630 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" podStartSLOduration=4.758201864 podStartE2EDuration="37.93161351s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.22150714 +0000 UTC m=+814.299762751" lastFinishedPulling="2025-10-07 19:32:01.394918786 +0000 UTC m=+847.473174397" observedRunningTime="2025-10-07 19:32:02.931217968 +0000 UTC m=+849.009473579" watchObservedRunningTime="2025-10-07 19:32:02.93161351 +0000 UTC m=+849.009869121" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.972505 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" podStartSLOduration=4.328846434 podStartE2EDuration="37.972487254s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:27.737651095 +0000 UTC m=+813.815906706" lastFinishedPulling="2025-10-07 19:32:01.381291915 +0000 UTC m=+847.459547526" observedRunningTime="2025-10-07 19:32:02.970290689 +0000 UTC m=+849.048546300" watchObservedRunningTime="2025-10-07 19:32:02.972487254 +0000 UTC m=+849.050742865" Oct 07 19:32:02 crc kubenswrapper[4813]: I1007 19:32:02.995276 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" podStartSLOduration=3.607387565 podStartE2EDuration="37.995256935s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:27.017787683 +0000 UTC 
m=+813.096043284" lastFinishedPulling="2025-10-07 19:32:01.405657053 +0000 UTC m=+847.483912654" observedRunningTime="2025-10-07 19:32:02.994505092 +0000 UTC m=+849.072760703" watchObservedRunningTime="2025-10-07 19:32:02.995256935 +0000 UTC m=+849.073512546" Oct 07 19:32:03 crc kubenswrapper[4813]: I1007 19:32:03.018406 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" podStartSLOduration=3.546081854 podStartE2EDuration="38.018390666s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:26.883212814 +0000 UTC m=+812.961468425" lastFinishedPulling="2025-10-07 19:32:01.355521626 +0000 UTC m=+847.433777237" observedRunningTime="2025-10-07 19:32:03.014546713 +0000 UTC m=+849.092802324" watchObservedRunningTime="2025-10-07 19:32:03.018390666 +0000 UTC m=+849.096646277" Oct 07 19:32:03 crc kubenswrapper[4813]: I1007 19:32:03.038704 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" podStartSLOduration=4.490610439 podStartE2EDuration="38.038688994s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:27.833445637 +0000 UTC m=+813.911701248" lastFinishedPulling="2025-10-07 19:32:01.381524202 +0000 UTC m=+847.459779803" observedRunningTime="2025-10-07 19:32:03.037732216 +0000 UTC m=+849.115987827" watchObservedRunningTime="2025-10-07 19:32:03.038688994 +0000 UTC m=+849.116944605" Oct 07 19:32:03 crc kubenswrapper[4813]: I1007 19:32:03.811132 4813 generic.go:334] "Generic (PLEG): container finished" podID="9a89425b-9503-4d37-88f9-1be656146b03" containerID="3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a" exitCode=0 Oct 07 19:32:03 crc kubenswrapper[4813]: I1007 19:32:03.811185 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlhtj" event={"ID":"9a89425b-9503-4d37-88f9-1be656146b03","Type":"ContainerDied","Data":"3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a"} Oct 07 19:32:03 crc kubenswrapper[4813]: I1007 19:32:03.815676 4813 generic.go:334] "Generic (PLEG): container finished" podID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerID="8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42" exitCode=0 Oct 07 19:32:03 crc kubenswrapper[4813]: I1007 19:32:03.815794 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w89rt" event={"ID":"bc9aee39-39c8-4956-b050-8bf23e5617cf","Type":"ContainerDied","Data":"8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42"} Oct 07 19:32:05 crc kubenswrapper[4813]: I1007 19:32:05.433876 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-fgdgs" Oct 07 19:32:05 crc kubenswrapper[4813]: I1007 19:32:05.589267 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-zpbgs" Oct 07 19:32:05 crc kubenswrapper[4813]: I1007 19:32:05.794918 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-mtpdz" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.198773 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-x9rm6" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.405648 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-6f96f8c84-vv8xc" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.420720 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-j2xrx" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.445005 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-775776c574-s879n" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.510061 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-74665f6cdc-5p9hd" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.567312 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-5dd4499c96-nhkpb" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.853953 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" event={"ID":"8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8","Type":"ContainerStarted","Data":"71b610ff37170c6e5a8e736de6be98e6d47b9924ee444d15c4b49bfc31d3f46a"} Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.854530 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.856072 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" event={"ID":"60dd68e0-dc15-4515-aab8-91f2cbd44487","Type":"ContainerStarted","Data":"8c50baa28ad1d953263eebd5b6e13456098c161d5373b3198d74a24e564ce5b3"} Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.856263 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.878600 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" podStartSLOduration=3.136000914 podStartE2EDuration="41.878582157s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:27.785467879 +0000 UTC m=+813.863723490" lastFinishedPulling="2025-10-07 19:32:06.528049122 +0000 UTC m=+852.606304733" observedRunningTime="2025-10-07 19:32:06.87358778 +0000 UTC m=+852.951843401" watchObservedRunningTime="2025-10-07 19:32:06.878582157 +0000 UTC m=+852.956837778" Oct 07 19:32:06 crc kubenswrapper[4813]: I1007 19:32:06.897257 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" podStartSLOduration=3.7369199379999998 podStartE2EDuration="41.897233327s" podCreationTimestamp="2025-10-07 19:31:25 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.368361922 +0000 UTC m=+814.446617533" lastFinishedPulling="2025-10-07 19:32:06.528675311 +0000 UTC m=+852.606930922" observedRunningTime="2025-10-07 19:32:06.890833678 +0000 UTC m=+852.969089299" watchObservedRunningTime="2025-10-07 
19:32:06.897233327 +0000 UTC m=+852.975488938" Oct 07 19:32:07 crc kubenswrapper[4813]: I1007 19:32:07.790753 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q" Oct 07 19:32:07 crc kubenswrapper[4813]: I1007 19:32:07.872405 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w89rt" event={"ID":"bc9aee39-39c8-4956-b050-8bf23e5617cf","Type":"ContainerStarted","Data":"2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a"} Oct 07 19:32:07 crc kubenswrapper[4813]: I1007 19:32:07.879007 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wl725" event={"ID":"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb","Type":"ContainerStarted","Data":"0baf1fc77243d583856749c01d443c5944cf627e5fca9a0baff8c61648c5853b"} Oct 07 19:32:07 crc kubenswrapper[4813]: I1007 19:32:07.883527 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlhtj" event={"ID":"9a89425b-9503-4d37-88f9-1be656146b03","Type":"ContainerStarted","Data":"eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079"} Oct 07 19:32:07 crc kubenswrapper[4813]: I1007 19:32:07.899748 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-w89rt" podStartSLOduration=23.539241179 podStartE2EDuration="29.899733806s" podCreationTimestamp="2025-10-07 19:31:38 +0000 UTC" firstStartedPulling="2025-10-07 19:32:00.63147517 +0000 UTC m=+846.709730781" lastFinishedPulling="2025-10-07 19:32:06.991967797 +0000 UTC m=+853.070223408" observedRunningTime="2025-10-07 19:32:07.894354867 +0000 UTC m=+853.972610478" watchObservedRunningTime="2025-10-07 19:32:07.899733806 +0000 UTC m=+853.977989417" Oct 07 19:32:07 crc kubenswrapper[4813]: I1007 19:32:07.944550 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nlhtj" podStartSLOduration=12.943577653 podStartE2EDuration="19.944534215s" podCreationTimestamp="2025-10-07 19:31:48 +0000 UTC" firstStartedPulling="2025-10-07 19:32:00.36902822 +0000 UTC m=+846.447283831" lastFinishedPulling="2025-10-07 19:32:07.369984782 +0000 UTC m=+853.448240393" observedRunningTime="2025-10-07 19:32:07.922095924 +0000 UTC m=+854.000351545" watchObservedRunningTime="2025-10-07 19:32:07.944534215 +0000 UTC m=+854.022789826" Oct 07 19:32:08 crc kubenswrapper[4813]: I1007 19:32:08.890725 4813 generic.go:334] "Generic (PLEG): container finished" podID="6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb" containerID="0baf1fc77243d583856749c01d443c5944cf627e5fca9a0baff8c61648c5853b" exitCode=0 Oct 07 19:32:08 crc kubenswrapper[4813]: I1007 19:32:08.890744 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wl725" event={"ID":"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb","Type":"ContainerDied","Data":"0baf1fc77243d583856749c01d443c5944cf627e5fca9a0baff8c61648c5853b"} Oct 07 19:32:09 crc kubenswrapper[4813]: I1007 19:32:09.201175 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:32:09 crc kubenswrapper[4813]: I1007 19:32:09.201252 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:32:09 crc kubenswrapper[4813]: I1007 19:32:09.240573 4813 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:32:09 crc kubenswrapper[4813]: I1007 19:32:09.256038 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:32:09 crc kubenswrapper[4813]: I1007 19:32:09.256084 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:32:09 crc kubenswrapper[4813]: I1007 19:32:09.898493 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wl725" event={"ID":"6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb","Type":"ContainerStarted","Data":"70b6597e9c23cae707f08f935c647169301ad08424dfdb34f1ac32db8b79e0c0"} Oct 07 19:32:09 crc kubenswrapper[4813]: I1007 19:32:09.919976 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wl725" podStartSLOduration=3.632915326 podStartE2EDuration="39.91995669s" podCreationTimestamp="2025-10-07 19:31:30 +0000 UTC" firstStartedPulling="2025-10-07 19:31:33.014029147 +0000 UTC m=+819.092284758" lastFinishedPulling="2025-10-07 19:32:09.301070511 +0000 UTC m=+855.379326122" observedRunningTime="2025-10-07 19:32:09.917941511 +0000 UTC m=+855.996197122" watchObservedRunningTime="2025-10-07 19:32:09.91995669 +0000 UTC m=+855.998212321" Oct 07 19:32:10 crc kubenswrapper[4813]: I1007 19:32:10.296192 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nlhtj" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="registry-server" probeResult="failure" output=< Oct 07 19:32:10 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:32:10 crc kubenswrapper[4813]: > Oct 07 19:32:10 crc kubenswrapper[4813]: E1007 19:32:10.604936 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" podUID="58a86259-bcad-428f-9d1d-5e8c059403a8" Oct 07 19:32:11 crc kubenswrapper[4813]: I1007 19:32:11.046455 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wl725" Oct 07 19:32:11 crc kubenswrapper[4813]: I1007 19:32:11.046528 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wl725" Oct 07 19:32:12 crc kubenswrapper[4813]: I1007 19:32:12.097748 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-wl725" podUID="6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb" containerName="registry-server" probeResult="failure" output=< Oct 07 19:32:12 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:32:12 crc kubenswrapper[4813]: > Oct 07 19:32:15 crc kubenswrapper[4813]: I1007 19:32:15.423975 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-m49nk" Oct 07 19:32:15 crc kubenswrapper[4813]: I1007 19:32:15.489258 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-ndtgw" 
Oct 07 19:32:15 crc kubenswrapper[4813]: I1007 19:32:15.519582 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-lk6lc" Oct 07 19:32:15 crc kubenswrapper[4813]: I1007 19:32:15.845008 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-662qq" Oct 07 19:32:15 crc kubenswrapper[4813]: I1007 19:32:15.869922 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-kjl4l" Oct 07 19:32:15 crc kubenswrapper[4813]: I1007 19:32:15.901050 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-59578bc799-v6ggr" Oct 07 19:32:15 crc kubenswrapper[4813]: I1007 19:32:15.973962 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-6xkm2" Oct 07 19:32:16 crc kubenswrapper[4813]: I1007 19:32:16.046381 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xzgn5" Oct 07 19:32:16 crc kubenswrapper[4813]: I1007 19:32:16.127585 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7m9t5" Oct 07 19:32:16 crc kubenswrapper[4813]: I1007 19:32:16.231839 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-585fc5b659-gngt9" Oct 07 19:32:16 crc kubenswrapper[4813]: I1007 19:32:16.403701 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-664664cb68-rhpjg" Oct 07 19:32:19 crc kubenswrapper[4813]: I1007 19:32:19.255073 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:32:19 crc kubenswrapper[4813]: I1007 19:32:19.313245 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w89rt"] Oct 07 19:32:19 crc kubenswrapper[4813]: I1007 19:32:19.983172 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-w89rt" podUID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerName="registry-server" containerID="cri-o://2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a" gracePeriod=2 Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.297772 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nlhtj" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="registry-server" probeResult="failure" output=< Oct 07 19:32:20 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:32:20 crc kubenswrapper[4813]: > Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.461206 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.552854 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-catalog-content\") pod \"bc9aee39-39c8-4956-b050-8bf23e5617cf\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.552918 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-utilities\") pod \"bc9aee39-39c8-4956-b050-8bf23e5617cf\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.552939 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rgkn\" (UniqueName: \"kubernetes.io/projected/bc9aee39-39c8-4956-b050-8bf23e5617cf-kube-api-access-7rgkn\") pod \"bc9aee39-39c8-4956-b050-8bf23e5617cf\" (UID: \"bc9aee39-39c8-4956-b050-8bf23e5617cf\") " Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.553631 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-utilities" (OuterVolumeSpecName: "utilities") pod "bc9aee39-39c8-4956-b050-8bf23e5617cf" (UID: "bc9aee39-39c8-4956-b050-8bf23e5617cf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.558523 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc9aee39-39c8-4956-b050-8bf23e5617cf-kube-api-access-7rgkn" (OuterVolumeSpecName: "kube-api-access-7rgkn") pod "bc9aee39-39c8-4956-b050-8bf23e5617cf" (UID: "bc9aee39-39c8-4956-b050-8bf23e5617cf"). InnerVolumeSpecName "kube-api-access-7rgkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.597576 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc9aee39-39c8-4956-b050-8bf23e5617cf" (UID: "bc9aee39-39c8-4956-b050-8bf23e5617cf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.655151 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.655183 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc9aee39-39c8-4956-b050-8bf23e5617cf-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.655198 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rgkn\" (UniqueName: \"kubernetes.io/projected/bc9aee39-39c8-4956-b050-8bf23e5617cf-kube-api-access-7rgkn\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.991755 4813 generic.go:334] "Generic (PLEG): container finished" podID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerID="2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a" exitCode=0 Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.992003 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w89rt" event={"ID":"bc9aee39-39c8-4956-b050-8bf23e5617cf","Type":"ContainerDied","Data":"2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a"} Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.992102 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w89rt" event={"ID":"bc9aee39-39c8-4956-b050-8bf23e5617cf","Type":"ContainerDied","Data":"bddc19156e76fe06b8c30416f7d17496b74b2e351e494054c054df3e82983524"} Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.992177 4813 scope.go:117] "RemoveContainer" containerID="2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a" Oct 07 19:32:20 crc kubenswrapper[4813]: I1007 19:32:20.992048 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w89rt" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.024098 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w89rt"] Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.030959 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-w89rt"] Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.037041 4813 scope.go:117] "RemoveContainer" containerID="8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.061551 4813 scope.go:117] "RemoveContainer" containerID="2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.088515 4813 scope.go:117] "RemoveContainer" containerID="2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a" Oct 07 19:32:21 crc kubenswrapper[4813]: E1007 19:32:21.089000 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a\": container with ID starting with 2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a not found: ID does not exist" containerID="2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.089040 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a"} err="failed to get container status \"2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a\": rpc error: code = NotFound desc = could not find container \"2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a\": container with ID starting with 2dc1acd3b8223e55d427930ddb5818178ed2869199bcf639a85b23d576a9ae3a not found: ID does not exist" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.089066 4813 scope.go:117] "RemoveContainer" containerID="8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42" Oct 07 19:32:21 crc kubenswrapper[4813]: E1007 19:32:21.089501 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42\": container with ID starting with 8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42 not found: ID does not exist" containerID="8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.089543 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42"} err="failed to get container status \"8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42\": rpc error: code = NotFound desc = could not find container \"8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42\": container with ID starting with 8e016807b83de6f3ee7a6eef9403f2f81e4b91cf1f1560534cc8976796448f42 not found: ID does not exist" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.089574 4813 scope.go:117] "RemoveContainer" containerID="2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b" Oct 07 19:32:21 crc kubenswrapper[4813]: E1007 19:32:21.089798 4813 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b\": container with ID starting with 2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b not found: ID does not exist" containerID="2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.089823 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b"} err="failed to get container status \"2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b\": rpc error: code = NotFound desc = could not find container \"2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b\": container with ID starting with 2f0dc1247da0aacf446c09e5766d48a40ca7cb251abbc579a8b5d0cfc10e710b not found: ID does not exist" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.112949 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wl725" Oct 07 19:32:21 crc kubenswrapper[4813]: I1007 19:32:21.165927 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wl725" Oct 07 19:32:22 crc kubenswrapper[4813]: I1007 19:32:22.619562 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 19:32:22 crc kubenswrapper[4813]: I1007 19:32:22.641605 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc9aee39-39c8-4956-b050-8bf23e5617cf" path="/var/lib/kubelet/pods/bc9aee39-39c8-4956-b050-8bf23e5617cf/volumes" Oct 07 19:32:23 crc kubenswrapper[4813]: I1007 19:32:23.143637 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wl725"] Oct 07 19:32:23 crc kubenswrapper[4813]: I1007 19:32:23.510830 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kzf7b"] Oct 07 19:32:23 crc kubenswrapper[4813]: I1007 19:32:23.511082 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kzf7b" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerName="registry-server" containerID="cri-o://2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671" gracePeriod=2 Oct 07 19:32:23 crc kubenswrapper[4813]: I1007 19:32:23.939346 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.002466 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-catalog-content\") pod \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.002510 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8zhz\" (UniqueName: \"kubernetes.io/projected/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-kube-api-access-c8zhz\") pod \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.002555 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-utilities\") pod \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\" (UID: \"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0\") " Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.003140 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-utilities" (OuterVolumeSpecName: "utilities") pod "9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" (UID: "9c5e5f03-66cc-4aa8-8dfe-051fca3285e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.009654 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-kube-api-access-c8zhz" (OuterVolumeSpecName: "kube-api-access-c8zhz") pod "9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" (UID: "9c5e5f03-66cc-4aa8-8dfe-051fca3285e0"). InnerVolumeSpecName "kube-api-access-c8zhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.017623 4813 generic.go:334] "Generic (PLEG): container finished" podID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerID="2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671" exitCode=0 Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.017664 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kzf7b" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.017686 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kzf7b" event={"ID":"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0","Type":"ContainerDied","Data":"2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671"} Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.017716 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kzf7b" event={"ID":"9c5e5f03-66cc-4aa8-8dfe-051fca3285e0","Type":"ContainerDied","Data":"32a6fab9b90e8136256e5e8ddb9d9834d332d36eae71dd9e9a14c4b1161c7b61"} Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.017732 4813 scope.go:117] "RemoveContainer" containerID="2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.021815 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" event={"ID":"58a86259-bcad-428f-9d1d-5e8c059403a8","Type":"ContainerStarted","Data":"88962eaa725f631413b413ba9ddcdda5fa8ce87a0a992043024e122e8eaf706d"} Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.047461 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5" podStartSLOduration=3.492760202 podStartE2EDuration="58.047439904s" podCreationTimestamp="2025-10-07 19:31:26 +0000 UTC" firstStartedPulling="2025-10-07 19:31:28.643711813 +0000 UTC m=+814.721967424" lastFinishedPulling="2025-10-07 19:32:23.198391515 +0000 UTC m=+869.276647126" observedRunningTime="2025-10-07 19:32:24.036833932 +0000 UTC m=+870.115089553" watchObservedRunningTime="2025-10-07 19:32:24.047439904 +0000 UTC m=+870.125695515" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.056340 4813 scope.go:117] "RemoveContainer" containerID="6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.070695 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" (UID: "9c5e5f03-66cc-4aa8-8dfe-051fca3285e0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.083177 4813 scope.go:117] "RemoveContainer" containerID="4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.098943 4813 scope.go:117] "RemoveContainer" containerID="2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671" Oct 07 19:32:24 crc kubenswrapper[4813]: E1007 19:32:24.099754 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671\": container with ID starting with 2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671 not found: ID does not exist" containerID="2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.099794 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671"} err="failed to get container status \"2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671\": rpc error: code = NotFound desc = could not find container \"2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671\": container with ID starting with 2087d382ad73d546cd60539b330191d84a69a019bddeba6c1a0c7a39ac77f671 not found: ID does not exist" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.099836 4813 scope.go:117] "RemoveContainer" containerID="6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e" Oct 07 19:32:24 crc kubenswrapper[4813]: E1007 19:32:24.100191 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e\": container with ID starting with 6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e not found: ID does not exist" containerID="6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.100233 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e"} err="failed to get container status \"6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e\": rpc error: code = NotFound desc = could not find container \"6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e\": container with ID starting with 6b966a47b5ca6f35c698cff8e4ba026654654ee8b2f3ca2e6ab2c137e326666e not found: ID does not exist" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.100249 4813 scope.go:117] "RemoveContainer" containerID="4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda" Oct 07 19:32:24 crc kubenswrapper[4813]: E1007 19:32:24.101023 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda\": container with ID starting with 4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda not found: ID does not exist" containerID="4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.101060 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda"} err="failed to get container status \"4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda\": rpc error: code = NotFound desc = could not find container \"4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda\": container with ID starting with 4e611b67893656aa0945a18f4df7ae22babd15a5b6739b9b86d3852f6ce62fda not found: ID does not exist" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.104299 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8zhz\" (UniqueName: \"kubernetes.io/projected/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-kube-api-access-c8zhz\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.104344 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.104354 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.347510 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kzf7b"] Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.352246 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kzf7b"] Oct 07 19:32:24 crc kubenswrapper[4813]: I1007 19:32:24.612524 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" path="/var/lib/kubelet/pods/9c5e5f03-66cc-4aa8-8dfe-051fca3285e0/volumes" Oct 07 19:32:29 crc kubenswrapper[4813]: I1007 19:32:29.336803 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:32:29 crc kubenswrapper[4813]: I1007 19:32:29.414109 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:32:29 crc kubenswrapper[4813]: I1007 19:32:29.723287 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nlhtj"] Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.077011 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nlhtj" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="registry-server" containerID="cri-o://eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079" gracePeriod=2 Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.487777 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.635159 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-catalog-content\") pod \"9a89425b-9503-4d37-88f9-1be656146b03\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.635262 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-utilities\") pod \"9a89425b-9503-4d37-88f9-1be656146b03\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.635296 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4mrn\" (UniqueName: \"kubernetes.io/projected/9a89425b-9503-4d37-88f9-1be656146b03-kube-api-access-s4mrn\") pod \"9a89425b-9503-4d37-88f9-1be656146b03\" (UID: \"9a89425b-9503-4d37-88f9-1be656146b03\") " Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.635963 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-utilities" (OuterVolumeSpecName: "utilities") pod "9a89425b-9503-4d37-88f9-1be656146b03" (UID: "9a89425b-9503-4d37-88f9-1be656146b03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.641411 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a89425b-9503-4d37-88f9-1be656146b03-kube-api-access-s4mrn" (OuterVolumeSpecName: "kube-api-access-s4mrn") pod "9a89425b-9503-4d37-88f9-1be656146b03" (UID: "9a89425b-9503-4d37-88f9-1be656146b03"). InnerVolumeSpecName "kube-api-access-s4mrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.737712 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.737820 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4mrn\" (UniqueName: \"kubernetes.io/projected/9a89425b-9503-4d37-88f9-1be656146b03-kube-api-access-s4mrn\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.783257 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a89425b-9503-4d37-88f9-1be656146b03" (UID: "9a89425b-9503-4d37-88f9-1be656146b03"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:32:31 crc kubenswrapper[4813]: I1007 19:32:31.839821 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a89425b-9503-4d37-88f9-1be656146b03-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.087099 4813 generic.go:334] "Generic (PLEG): container finished" podID="9a89425b-9503-4d37-88f9-1be656146b03" containerID="eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079" exitCode=0 Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.087145 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlhtj" event={"ID":"9a89425b-9503-4d37-88f9-1be656146b03","Type":"ContainerDied","Data":"eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079"} Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.087176 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nlhtj" Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.087200 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nlhtj" event={"ID":"9a89425b-9503-4d37-88f9-1be656146b03","Type":"ContainerDied","Data":"c82782c156f5eb61acaae776dc18f511112020d7b6343f8cec9c6cdec4ea1fbe"} Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.087226 4813 scope.go:117] "RemoveContainer" containerID="eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079" Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.109408 4813 scope.go:117] "RemoveContainer" containerID="3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a" Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.132751 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nlhtj"] Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.140394 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nlhtj"] Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.151979 4813 scope.go:117] "RemoveContainer" containerID="a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3" Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.170313 4813 scope.go:117] "RemoveContainer" containerID="eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079" Oct 07 19:32:32 crc kubenswrapper[4813]: E1007 19:32:32.170864 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079\": container with ID starting with eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079 not found: ID does not exist" containerID="eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079" Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.170928 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079"} err="failed to get container status \"eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079\": rpc error: code = NotFound desc = could not find container \"eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079\": container with ID starting with eb34308e53ef4c1778189367094ef1c06f90591a1bb15c1b8baf11a6b0d46079 not found: ID does not exist" Oct 07 19:32:32 crc 
Oct 07 19:32:32 crc kubenswrapper[4813]: E1007 19:32:32.171408 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a\": container with ID starting with 3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a not found: ID does not exist" containerID="3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a"
Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.171437 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a"} err="failed to get container status \"3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a\": rpc error: code = NotFound desc = could not find container \"3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a\": container with ID starting with 3e228f599c26756f8dcf9454edc88466143b3da650eb11b892e2b881a4c9b83a not found: ID does not exist"
Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.171457 4813 scope.go:117] "RemoveContainer" containerID="a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3"
Oct 07 19:32:32 crc kubenswrapper[4813]: E1007 19:32:32.171856 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3\": container with ID starting with a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3 not found: ID does not exist" containerID="a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3"
Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.171922 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3"} err="failed to get container status \"a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3\": rpc error: code = NotFound desc = could not find container \"a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3\": container with ID starting with a0410c252c1d36a23f838bda3b343598ab1739a4d489c24d209eb1ea5fb8d7f3 not found: ID does not exist"
Oct 07 19:32:32 crc kubenswrapper[4813]: I1007 19:32:32.620529 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a89425b-9503-4d37-88f9-1be656146b03" path="/var/lib/kubelet/pods/9a89425b-9503-4d37-88f9-1be656146b03/volumes"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.659677 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tpw6x"]
Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660592 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="registry-server"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660611 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="registry-server"
Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660622 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="extract-utilities"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660629 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="extract-utilities"
"Deleted CPUSet assignment" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="extract-utilities" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660647 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerName="extract-utilities" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660658 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerName="extract-utilities" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660674 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerName="extract-content" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660682 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerName="extract-content" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660699 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="extract-utilities" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660707 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="extract-utilities" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660717 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerName="registry-server" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660725 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerName="registry-server" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660735 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="extract-content" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660742 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="extract-content" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660757 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerName="extract-content" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660764 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerName="extract-content" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660784 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="extract-content" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660793 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="extract-content" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660805 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="registry-server" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660813 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="registry-server" Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660826 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerName="registry-server" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 
Oct 07 19:32:40 crc kubenswrapper[4813]: E1007 19:32:40.660848 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerName="extract-utilities"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.660856 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerName="extract-utilities"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.661042 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a89425b-9503-4d37-88f9-1be656146b03" containerName="registry-server"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.661062 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8291dfc0-99c2-4184-bbe8-dcd060f0f69b" containerName="registry-server"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.661072 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc9aee39-39c8-4956-b050-8bf23e5617cf" containerName="registry-server"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.661090 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c5e5f03-66cc-4aa8-8dfe-051fca3285e0" containerName="registry-server"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.661956 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.665383 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.665440 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.665882 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-cp7fk"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.666317 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.681173 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tpw6x"]
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.728402 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-nlrjh"]
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.729467 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh"
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.731293 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.744201 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-nlrjh"] Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.774426 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrknv\" (UniqueName: \"kubernetes.io/projected/4f19523b-9140-47af-b6cf-ed858867ed62-kube-api-access-mrknv\") pod \"dnsmasq-dns-675f4bcbfc-tpw6x\" (UID: \"4f19523b-9140-47af-b6cf-ed858867ed62\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.774467 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f19523b-9140-47af-b6cf-ed858867ed62-config\") pod \"dnsmasq-dns-675f4bcbfc-tpw6x\" (UID: \"4f19523b-9140-47af-b6cf-ed858867ed62\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.875983 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-config\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.876101 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.876176 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrknv\" (UniqueName: \"kubernetes.io/projected/4f19523b-9140-47af-b6cf-ed858867ed62-kube-api-access-mrknv\") pod \"dnsmasq-dns-675f4bcbfc-tpw6x\" (UID: \"4f19523b-9140-47af-b6cf-ed858867ed62\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.876222 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f19523b-9140-47af-b6cf-ed858867ed62-config\") pod \"dnsmasq-dns-675f4bcbfc-tpw6x\" (UID: \"4f19523b-9140-47af-b6cf-ed858867ed62\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.876257 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl62z\" (UniqueName: \"kubernetes.io/projected/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-kube-api-access-nl62z\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.877204 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f19523b-9140-47af-b6cf-ed858867ed62-config\") pod \"dnsmasq-dns-675f4bcbfc-tpw6x\" (UID: \"4f19523b-9140-47af-b6cf-ed858867ed62\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" Oct 07 
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.897110 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrknv\" (UniqueName: \"kubernetes.io/projected/4f19523b-9140-47af-b6cf-ed858867ed62-kube-api-access-mrknv\") pod \"dnsmasq-dns-675f4bcbfc-tpw6x\" (UID: \"4f19523b-9140-47af-b6cf-ed858867ed62\") " pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.977471 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.977777 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-config\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.977847 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.977902 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl62z\" (UniqueName: \"kubernetes.io/projected/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-kube-api-access-nl62z\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.978552 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh"
Oct 07 19:32:40 crc kubenswrapper[4813]: I1007 19:32:40.978618 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-config\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh"
Oct 07 19:32:41 crc kubenswrapper[4813]: I1007 19:32:41.004690 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl62z\" (UniqueName: \"kubernetes.io/projected/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-kube-api-access-nl62z\") pod \"dnsmasq-dns-78dd6ddcc-nlrjh\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh"
Oct 07 19:32:41 crc kubenswrapper[4813]: I1007 19:32:41.043067 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh"
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" Oct 07 19:32:41 crc kubenswrapper[4813]: W1007 19:32:41.453421 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f19523b_9140_47af_b6cf_ed858867ed62.slice/crio-521524eb3aa16e8045c43b40199d8b35b24582b5085d8123ca57163cdaa9362a WatchSource:0}: Error finding container 521524eb3aa16e8045c43b40199d8b35b24582b5085d8123ca57163cdaa9362a: Status 404 returned error can't find the container with id 521524eb3aa16e8045c43b40199d8b35b24582b5085d8123ca57163cdaa9362a Oct 07 19:32:41 crc kubenswrapper[4813]: I1007 19:32:41.463965 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tpw6x"] Oct 07 19:32:41 crc kubenswrapper[4813]: I1007 19:32:41.541766 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-nlrjh"] Oct 07 19:32:41 crc kubenswrapper[4813]: W1007 19:32:41.541821 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a9bb4f0_e3d8_4b42_8d2a_e8645e401741.slice/crio-b39059be4c298602def63e910590a9330a46d5097367d0ff8acb3d28b2e7074d WatchSource:0}: Error finding container b39059be4c298602def63e910590a9330a46d5097367d0ff8acb3d28b2e7074d: Status 404 returned error can't find the container with id b39059be4c298602def63e910590a9330a46d5097367d0ff8acb3d28b2e7074d Oct 07 19:32:42 crc kubenswrapper[4813]: I1007 19:32:42.178872 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" event={"ID":"4f19523b-9140-47af-b6cf-ed858867ed62","Type":"ContainerStarted","Data":"521524eb3aa16e8045c43b40199d8b35b24582b5085d8123ca57163cdaa9362a"} Oct 07 19:32:42 crc kubenswrapper[4813]: I1007 19:32:42.181202 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" event={"ID":"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741","Type":"ContainerStarted","Data":"b39059be4c298602def63e910590a9330a46d5097367d0ff8acb3d28b2e7074d"} Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.662471 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tpw6x"] Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.689965 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-k4522"] Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.703021 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.726124 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-k4522"] Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.819616 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnjsg\" (UniqueName: \"kubernetes.io/projected/e55b2830-690d-4fcb-ab00-e619a514aa89-kube-api-access-pnjsg\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.819694 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-dns-svc\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.819728 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-config\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.920864 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnjsg\" (UniqueName: \"kubernetes.io/projected/e55b2830-690d-4fcb-ab00-e619a514aa89-kube-api-access-pnjsg\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.920925 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-dns-svc\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.920955 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-config\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.921892 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-config\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.922781 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-dns-svc\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:43 crc kubenswrapper[4813]: I1007 19:32:43.972542 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnjsg\" (UniqueName: 
\"kubernetes.io/projected/e55b2830-690d-4fcb-ab00-e619a514aa89-kube-api-access-pnjsg\") pod \"dnsmasq-dns-666b6646f7-k4522\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.051497 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.055382 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-nlrjh"] Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.117371 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mhnfj"] Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.118422 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.134783 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mhnfj"] Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.227819 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.227868 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgpr5\" (UniqueName: \"kubernetes.io/projected/9f1866f9-c802-4561-b716-040250f6dbc7-kube-api-access-xgpr5\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.227893 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-config\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.328884 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgpr5\" (UniqueName: \"kubernetes.io/projected/9f1866f9-c802-4561-b716-040250f6dbc7-kube-api-access-xgpr5\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.328928 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-config\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.329021 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.329855 4813 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.330049 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-config\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.359259 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgpr5\" (UniqueName: \"kubernetes.io/projected/9f1866f9-c802-4561-b716-040250f6dbc7-kube-api-access-xgpr5\") pod \"dnsmasq-dns-57d769cc4f-mhnfj\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.467419 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.733683 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-k4522"] Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.848544 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.849871 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.855122 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.855622 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.857174 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.857816 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.858553 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.858688 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4bdjx" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.862018 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.863581 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938346 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b693f559-87e2-41ef-94c0-56d76bd9ef00-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938417 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938472 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b693f559-87e2-41ef-94c0-56d76bd9ef00-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938538 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938560 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938631 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938651 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938742 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72lft\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-kube-api-access-72lft\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938761 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938776 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-config-data\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:44 crc kubenswrapper[4813]: I1007 19:32:44.938793 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.039739 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.039799 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.039828 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72lft\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-kube-api-access-72lft\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.039873 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-config-data\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.039890 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.039903 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.041538 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b693f559-87e2-41ef-94c0-56d76bd9ef00-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.040158 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.041581 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.041183 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-config-data\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.041676 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b693f559-87e2-41ef-94c0-56d76bd9ef00-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.041756 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.041775 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.042047 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.040901 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.042248 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.042416 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.054219 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b693f559-87e2-41ef-94c0-56d76bd9ef00-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.054873 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b693f559-87e2-41ef-94c0-56d76bd9ef00-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " 
pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.055399 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.058971 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72lft\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-kube-api-access-72lft\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.075615 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.085937 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.177758 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.235545 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-k4522" event={"ID":"e55b2830-690d-4fcb-ab00-e619a514aa89","Type":"ContainerStarted","Data":"847bef0ff7241c42dd04d49bc0b8bba6a8ba06ed72b0aa6ab086090437081eff"} Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.277589 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.279475 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.285520 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.285729 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.286377 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.286517 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.286561 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.286600 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.286963 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-7pwf9" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.291150 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345614 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345677 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345696 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345717 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345732 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345747 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345763 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345778 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345810 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345841 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.345870 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmg87\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-kube-api-access-lmg87\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.361079 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mhnfj"] Oct 07 19:32:45 crc kubenswrapper[4813]: W1007 19:32:45.371509 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f1866f9_c802_4561_b716_040250f6dbc7.slice/crio-3020248de48783b0282b6931e94218ad01b41c5443403277600ab4b8301c9ac0 WatchSource:0}: Error finding container 3020248de48783b0282b6931e94218ad01b41c5443403277600ab4b8301c9ac0: Status 404 returned error can't find the container with id 3020248de48783b0282b6931e94218ad01b41c5443403277600ab4b8301c9ac0 Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446669 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446726 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446761 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmg87\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-kube-api-access-lmg87\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446786 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446819 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446836 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446857 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446882 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446921 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446940 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.446955 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.447151 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.447981 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.448279 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.448610 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.448894 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.450060 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.453608 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.454231 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.454229 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.454479 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.464014 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmg87\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-kube-api-access-lmg87\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.475294 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.602001 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:32:45 crc kubenswrapper[4813]: I1007 19:32:45.670033 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.201736 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.244410 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" event={"ID":"9f1866f9-c802-4561-b716-040250f6dbc7","Type":"ContainerStarted","Data":"3020248de48783b0282b6931e94218ad01b41c5443403277600ab4b8301c9ac0"} Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.247156 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49","Type":"ContainerStarted","Data":"3dabc2c7af752113205f6554fe9dab36531f22855b747048d63c02bbae9eab7a"} Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.264268 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b693f559-87e2-41ef-94c0-56d76bd9ef00","Type":"ContainerStarted","Data":"fb89fefcb27d4c77968203ac8d8a97b710bbefa2f187c404c12edeb144ed380b"} Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.912660 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.914694 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.923149 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.923439 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.923790 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.924262 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-7q6sv" Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.924414 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.929659 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 07 19:32:46 crc kubenswrapper[4813]: I1007 19:32:46.932391 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.074804 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkqx8\" (UniqueName: \"kubernetes.io/projected/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-kube-api-access-pkqx8\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.074869 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.074898 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.074936 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.075123 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-config-data-default\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.075216 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-kolla-config\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " 
pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.075238 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.075288 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-secrets\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.075427 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179301 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179389 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179419 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179456 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-config-data-default\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179483 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-kolla-config\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179499 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179528 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"secrets\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-secrets\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179580 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.179613 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkqx8\" (UniqueName: \"kubernetes.io/projected/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-kube-api-access-pkqx8\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.180765 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.181282 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-config-data-default\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.181585 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.181754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-kolla-config\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.186119 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.195372 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.214871 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-secrets\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc 
kubenswrapper[4813]: I1007 19:32:47.214983 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.233987 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkqx8\" (UniqueName: \"kubernetes.io/projected/93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b-kube-api-access-pkqx8\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.242834 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b\") " pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.535403 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.707062 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.717362 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.720883 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-fms22" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.722169 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.723108 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.723671 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.740227 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.905800 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.905849 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.905868 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-secrets\") pod \"openstack-cell1-galera-0\" (UID: 
\"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.905918 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.905940 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.905979 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.906004 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.906020 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:47 crc kubenswrapper[4813]: I1007 19:32:47.906034 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lj5n\" (UniqueName: \"kubernetes.io/projected/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-kube-api-access-7lj5n\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009665 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009733 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009754 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lj5n\" (UniqueName: \"kubernetes.io/projected/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-kube-api-access-7lj5n\") pod 
\"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009771 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009819 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009846 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009864 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009944 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.009974 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.010379 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.011146 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.011312 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 
07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.013023 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.014008 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.016891 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.017228 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.031201 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.042393 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.057845 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lj5n\" (UniqueName: \"kubernetes.io/projected/1a2d18a4-7c93-4743-8f3d-3367a4dd937a-kube-api-access-7lj5n\") pod \"openstack-cell1-galera-0\" (UID: \"1a2d18a4-7c93-4743-8f3d-3367a4dd937a\") " pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.093450 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.094315 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.098732 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.106004 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.108891 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-pq6gw" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.110908 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.213109 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d330e133-a612-477b-afbd-2af06b9e084d-kolla-config\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.213184 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d330e133-a612-477b-afbd-2af06b9e084d-config-data\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.213351 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk4t9\" (UniqueName: \"kubernetes.io/projected/d330e133-a612-477b-afbd-2af06b9e084d-kube-api-access-xk4t9\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.213407 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d330e133-a612-477b-afbd-2af06b9e084d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.213428 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d330e133-a612-477b-afbd-2af06b9e084d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.315036 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk4t9\" (UniqueName: \"kubernetes.io/projected/d330e133-a612-477b-afbd-2af06b9e084d-kube-api-access-xk4t9\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.315135 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d330e133-a612-477b-afbd-2af06b9e084d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.315157 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d330e133-a612-477b-afbd-2af06b9e084d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.315199 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d330e133-a612-477b-afbd-2af06b9e084d-kolla-config\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.315219 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d330e133-a612-477b-afbd-2af06b9e084d-config-data\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.315954 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d330e133-a612-477b-afbd-2af06b9e084d-config-data\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.317453 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d330e133-a612-477b-afbd-2af06b9e084d-kolla-config\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.319266 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/d330e133-a612-477b-afbd-2af06b9e084d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.336196 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d330e133-a612-477b-afbd-2af06b9e084d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.350765 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.350991 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk4t9\" (UniqueName: \"kubernetes.io/projected/d330e133-a612-477b-afbd-2af06b9e084d-kube-api-access-xk4t9\") pod \"memcached-0\" (UID: \"d330e133-a612-477b-afbd-2af06b9e084d\") " pod="openstack/memcached-0" Oct 07 19:32:48 crc kubenswrapper[4813]: I1007 19:32:48.446300 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 07 19:32:49 crc kubenswrapper[4813]: I1007 19:32:49.833013 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 19:32:49 crc kubenswrapper[4813]: I1007 19:32:49.834277 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 19:32:49 crc kubenswrapper[4813]: I1007 19:32:49.836464 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-d46bl" Oct 07 19:32:49 crc kubenswrapper[4813]: I1007 19:32:49.849638 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 19:32:49 crc kubenswrapper[4813]: I1007 19:32:49.945906 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw7jq\" (UniqueName: \"kubernetes.io/projected/9e7e2083-472f-4551-840c-35943117bcb0-kube-api-access-xw7jq\") pod \"kube-state-metrics-0\" (UID: \"9e7e2083-472f-4551-840c-35943117bcb0\") " pod="openstack/kube-state-metrics-0" Oct 07 19:32:50 crc kubenswrapper[4813]: I1007 19:32:50.047032 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw7jq\" (UniqueName: \"kubernetes.io/projected/9e7e2083-472f-4551-840c-35943117bcb0-kube-api-access-xw7jq\") pod \"kube-state-metrics-0\" (UID: \"9e7e2083-472f-4551-840c-35943117bcb0\") " pod="openstack/kube-state-metrics-0" Oct 07 19:32:50 crc kubenswrapper[4813]: I1007 19:32:50.068207 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw7jq\" (UniqueName: \"kubernetes.io/projected/9e7e2083-472f-4551-840c-35943117bcb0-kube-api-access-xw7jq\") pod \"kube-state-metrics-0\" (UID: \"9e7e2083-472f-4551-840c-35943117bcb0\") " pod="openstack/kube-state-metrics-0" Oct 07 19:32:50 crc kubenswrapper[4813]: I1007 19:32:50.162480 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 19:32:52 crc kubenswrapper[4813]: I1007 19:32:52.079642 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:32:52 crc kubenswrapper[4813]: I1007 19:32:52.079901 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.350419 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.352568 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.356312 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.356792 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-kfxg5"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.356976 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.357124 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.359385 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.368771 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.522957 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.523412 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6f3d5f7-8af0-4f42-ae53-bc7473860346-config\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.523506 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.523606 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qff4m\" (UniqueName: \"kubernetes.io/projected/b6f3d5f7-8af0-4f42-ae53-bc7473860346-kube-api-access-qff4m\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.523676 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.523727 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6f3d5f7-8af0-4f42-ae53-bc7473860346-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.523884 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.523958 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b6f3d5f7-8af0-4f42-ae53-bc7473860346-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.625557 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.625615 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6f3d5f7-8af0-4f42-ae53-bc7473860346-config\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.625650 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.625695 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qff4m\" (UniqueName: \"kubernetes.io/projected/b6f3d5f7-8af0-4f42-ae53-bc7473860346-kube-api-access-qff4m\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.625732 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.625761 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6f3d5f7-8af0-4f42-ae53-bc7473860346-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.625802 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.625823 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b6f3d5f7-8af0-4f42-ae53-bc7473860346-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.626279 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b6f3d5f7-8af0-4f42-ae53-bc7473860346-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.627597 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.632230 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.632505 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.632668 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.639492 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6f3d5f7-8af0-4f42-ae53-bc7473860346-config\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.640066 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.640711 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6f3d5f7-8af0-4f42-ae53-bc7473860346-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.649591 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.650700 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.656997 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qff4m\" (UniqueName: \"kubernetes.io/projected/b6f3d5f7-8af0-4f42-ae53-bc7473860346-kube-api-access-qff4m\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.662373 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6f3d5f7-8af0-4f42-ae53-bc7473860346-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.682829 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b6f3d5f7-8af0-4f42-ae53-bc7473860346\") " pod="openstack/ovsdbserver-nb-0"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.766910 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jd55f"]
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.768180 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.772170 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.772524 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-6ffkv"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.778471 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-dmq4j"]
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.779134 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.780490 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dmq4j"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.786165 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jd55f"]
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.800972 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-dmq4j"]
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930050 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47f8d464-3eaa-4ee5-ae74-c6339710ade0-scripts\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930095 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-run\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930116 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-run-ovn\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f"
Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930133 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-log\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j"
(UniqueName: \"kubernetes.io/projected/a162a130-6094-42c0-a3d1-489de4a7fac4-kube-api-access-jp5v2\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930351 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-log-ovn\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930380 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-lib\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930424 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a162a130-6094-42c0-a3d1-489de4a7fac4-ovn-controller-tls-certs\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930479 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl7kl\" (UniqueName: \"kubernetes.io/projected/47f8d464-3eaa-4ee5-ae74-c6339710ade0-kube-api-access-wl7kl\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930540 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-run\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930612 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a162a130-6094-42c0-a3d1-489de4a7fac4-scripts\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930694 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-etc-ovs\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.930750 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a162a130-6094-42c0-a3d1-489de4a7fac4-combined-ca-bundle\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.982059 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-kfxg5" Oct 07 
19:32:54 crc kubenswrapper[4813]: I1007 19:32:54.991012 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032274 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp5v2\" (UniqueName: \"kubernetes.io/projected/a162a130-6094-42c0-a3d1-489de4a7fac4-kube-api-access-jp5v2\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032377 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-log-ovn\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032418 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-lib\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032465 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a162a130-6094-42c0-a3d1-489de4a7fac4-ovn-controller-tls-certs\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032508 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl7kl\" (UniqueName: \"kubernetes.io/projected/47f8d464-3eaa-4ee5-ae74-c6339710ade0-kube-api-access-wl7kl\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032572 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-run\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032615 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a162a130-6094-42c0-a3d1-489de4a7fac4-scripts\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032683 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-etc-ovs\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032721 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a162a130-6094-42c0-a3d1-489de4a7fac4-combined-ca-bundle\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 
19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032776 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47f8d464-3eaa-4ee5-ae74-c6339710ade0-scripts\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032826 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-run\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032855 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-run-ovn\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.032886 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-log\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.033178 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-lib\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.033277 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-log\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.033354 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-run\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.033673 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-log-ovn\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.033852 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-etc-ovs\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.034051 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/47f8d464-3eaa-4ee5-ae74-c6339710ade0-var-run\") pod \"ovn-controller-ovs-dmq4j\" (UID: 
\"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.034235 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a162a130-6094-42c0-a3d1-489de4a7fac4-var-run-ovn\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.037289 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47f8d464-3eaa-4ee5-ae74-c6339710ade0-scripts\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.039110 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a162a130-6094-42c0-a3d1-489de4a7fac4-combined-ca-bundle\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.045903 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/a162a130-6094-42c0-a3d1-489de4a7fac4-ovn-controller-tls-certs\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.053198 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a162a130-6094-42c0-a3d1-489de4a7fac4-scripts\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.056643 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl7kl\" (UniqueName: \"kubernetes.io/projected/47f8d464-3eaa-4ee5-ae74-c6339710ade0-kube-api-access-wl7kl\") pod \"ovn-controller-ovs-dmq4j\" (UID: \"47f8d464-3eaa-4ee5-ae74-c6339710ade0\") " pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.056735 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp5v2\" (UniqueName: \"kubernetes.io/projected/a162a130-6094-42c0-a3d1-489de4a7fac4-kube-api-access-jp5v2\") pod \"ovn-controller-jd55f\" (UID: \"a162a130-6094-42c0-a3d1-489de4a7fac4\") " pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.104211 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f" Oct 07 19:32:55 crc kubenswrapper[4813]: I1007 19:32:55.121428 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.764737 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.767459 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.770307 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-9vtlq" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.770455 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.770752 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.772841 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.775421 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.890898 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55f9cac2-ed84-40f8-8bca-f10c774814f7-config\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.890981 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/55f9cac2-ed84-40f8-8bca-f10c774814f7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.891178 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.891571 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55f9cac2-ed84-40f8-8bca-f10c774814f7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.891657 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tvgr\" (UniqueName: \"kubernetes.io/projected/55f9cac2-ed84-40f8-8bca-f10c774814f7-kube-api-access-6tvgr\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.891692 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.891713 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " 
pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.891813 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993150 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55f9cac2-ed84-40f8-8bca-f10c774814f7-config\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993230 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/55f9cac2-ed84-40f8-8bca-f10c774814f7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993279 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993311 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55f9cac2-ed84-40f8-8bca-f10c774814f7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993368 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tvgr\" (UniqueName: \"kubernetes.io/projected/55f9cac2-ed84-40f8-8bca-f10c774814f7-kube-api-access-6tvgr\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993395 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993427 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993460 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.993669 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.994582 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55f9cac2-ed84-40f8-8bca-f10c774814f7-config\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.996129 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/55f9cac2-ed84-40f8-8bca-f10c774814f7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.996162 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/55f9cac2-ed84-40f8-8bca-f10c774814f7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:57 crc kubenswrapper[4813]: I1007 19:32:57.998177 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:58 crc kubenswrapper[4813]: I1007 19:32:57.999214 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:58 crc kubenswrapper[4813]: I1007 19:32:58.001136 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/55f9cac2-ed84-40f8-8bca-f10c774814f7-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:58 crc kubenswrapper[4813]: I1007 19:32:58.013731 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tvgr\" (UniqueName: \"kubernetes.io/projected/55f9cac2-ed84-40f8-8bca-f10c774814f7-kube-api-access-6tvgr\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:58 crc kubenswrapper[4813]: I1007 19:32:58.030360 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"55f9cac2-ed84-40f8-8bca-f10c774814f7\") " pod="openstack/ovsdbserver-sb-0" Oct 07 19:32:58 crc kubenswrapper[4813]: I1007 19:32:58.094363 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 07 19:33:03 crc kubenswrapper[4813]: E1007 19:33:03.235189 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Oct 07 19:33:03 crc kubenswrapper[4813]: E1007 19:33:03.236060 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lmg87,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:33:03 crc kubenswrapper[4813]: E1007 19:33:03.237284 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-cell1-server-0" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" Oct 07 19:33:03 crc kubenswrapper[4813]: E1007 19:33:03.281765 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Oct 07 19:33:03 crc kubenswrapper[4813]: E1007 19:33:03.282031 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-72lft,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(b693f559-87e2-41ef-94c0-56d76bd9ef00): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:33:03 crc kubenswrapper[4813]: E1007 19:33:03.283711 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/rabbitmq-server-0" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" Oct 07 19:33:03 crc kubenswrapper[4813]: E1007 19:33:03.437751 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" Oct 07 19:33:03 crc kubenswrapper[4813]: E1007 19:33:03.438218 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" Oct 07 19:33:04 crc kubenswrapper[4813]: E1007 19:33:04.144025 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 07 19:33:04 crc kubenswrapper[4813]: E1007 19:33:04.144436 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mrknv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-tpw6x_openstack(4f19523b-9140-47af-b6cf-ed858867ed62): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:33:04 crc kubenswrapper[4813]: E1007 19:33:04.146459 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" podUID="4f19523b-9140-47af-b6cf-ed858867ed62" Oct 07 19:33:04 crc kubenswrapper[4813]: I1007 19:33:04.436586 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 07 19:33:04 crc kubenswrapper[4813]: W1007 19:33:04.622740 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a2d18a4_7c93_4743_8f3d_3367a4dd937a.slice/crio-42556e4f4a768650188a7d329d2705c6874aa8e199c5e8cefe286da0d4b4960c WatchSource:0}: Error finding container 42556e4f4a768650188a7d329d2705c6874aa8e199c5e8cefe286da0d4b4960c: Status 404 returned error can't find the container with id 42556e4f4a768650188a7d329d2705c6874aa8e199c5e8cefe286da0d4b4960c Oct 07 19:33:04 crc kubenswrapper[4813]: W1007 19:33:04.624415 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd330e133_a612_477b_afbd_2af06b9e084d.slice/crio-dde40faac5519c3ee4abe302a20912d0c926dc19796896fd0e8b3dcc8b9e3e2a WatchSource:0}: Error finding container dde40faac5519c3ee4abe302a20912d0c926dc19796896fd0e8b3dcc8b9e3e2a: Status 404 returned error can't find the container with id dde40faac5519c3ee4abe302a20912d0c926dc19796896fd0e8b3dcc8b9e3e2a Oct 07 19:33:04 crc kubenswrapper[4813]: I1007 19:33:04.625911 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 07 19:33:04 crc kubenswrapper[4813]: I1007 19:33:04.625942 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 07 19:33:04 crc kubenswrapper[4813]: I1007 19:33:04.807998 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jd55f"] Oct 07 19:33:04 crc kubenswrapper[4813]: I1007 19:33:04.986946 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.049712 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.231502 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-dmq4j"] Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.231762 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrknv\" (UniqueName: \"kubernetes.io/projected/4f19523b-9140-47af-b6cf-ed858867ed62-kube-api-access-mrknv\") pod \"4f19523b-9140-47af-b6cf-ed858867ed62\" (UID: \"4f19523b-9140-47af-b6cf-ed858867ed62\") " Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.231920 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f19523b-9140-47af-b6cf-ed858867ed62-config\") pod \"4f19523b-9140-47af-b6cf-ed858867ed62\" (UID: \"4f19523b-9140-47af-b6cf-ed858867ed62\") " Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.232415 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f19523b-9140-47af-b6cf-ed858867ed62-config" (OuterVolumeSpecName: "config") pod "4f19523b-9140-47af-b6cf-ed858867ed62" (UID: "4f19523b-9140-47af-b6cf-ed858867ed62"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.238284 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f19523b-9140-47af-b6cf-ed858867ed62-kube-api-access-mrknv" (OuterVolumeSpecName: "kube-api-access-mrknv") pod "4f19523b-9140-47af-b6cf-ed858867ed62" (UID: "4f19523b-9140-47af-b6cf-ed858867ed62"). InnerVolumeSpecName "kube-api-access-mrknv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:05 crc kubenswrapper[4813]: W1007 19:33:05.245055 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47f8d464_3eaa_4ee5_ae74_c6339710ade0.slice/crio-12a528db8efe5f010220ee5fb518026c35ed2b586e3de5d31dfa71a88475c973 WatchSource:0}: Error finding container 12a528db8efe5f010220ee5fb518026c35ed2b586e3de5d31dfa71a88475c973: Status 404 returned error can't find the container with id 12a528db8efe5f010220ee5fb518026c35ed2b586e3de5d31dfa71a88475c973 Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.333546 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrknv\" (UniqueName: \"kubernetes.io/projected/4f19523b-9140-47af-b6cf-ed858867ed62-kube-api-access-mrknv\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.333875 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f19523b-9140-47af-b6cf-ed858867ed62-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.451438 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d330e133-a612-477b-afbd-2af06b9e084d","Type":"ContainerStarted","Data":"dde40faac5519c3ee4abe302a20912d0c926dc19796896fd0e8b3dcc8b9e3e2a"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.452510 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dmq4j" event={"ID":"47f8d464-3eaa-4ee5-ae74-c6339710ade0","Type":"ContainerStarted","Data":"12a528db8efe5f010220ee5fb518026c35ed2b586e3de5d31dfa71a88475c973"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.454384 4813 generic.go:334] "Generic (PLEG): container finished" podID="e55b2830-690d-4fcb-ab00-e619a514aa89" containerID="9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af" exitCode=0 Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.454447 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-k4522" event={"ID":"e55b2830-690d-4fcb-ab00-e619a514aa89","Type":"ContainerDied","Data":"9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.457480 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9e7e2083-472f-4551-840c-35943117bcb0","Type":"ContainerStarted","Data":"ca78c531c220401fb7bdfefe1d3a270b79c2931e7c38e178a4d283c308a4c863"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.463607 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.463617 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-tpw6x" event={"ID":"4f19523b-9140-47af-b6cf-ed858867ed62","Type":"ContainerDied","Data":"521524eb3aa16e8045c43b40199d8b35b24582b5085d8123ca57163cdaa9362a"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.466662 4813 generic.go:334] "Generic (PLEG): container finished" podID="9f1866f9-c802-4561-b716-040250f6dbc7" containerID="b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3" exitCode=0 Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.466798 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" event={"ID":"9f1866f9-c802-4561-b716-040250f6dbc7","Type":"ContainerDied","Data":"b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.475131 4813 generic.go:334] "Generic (PLEG): container finished" podID="6a9bb4f0-e3d8-4b42-8d2a-e8645e401741" containerID="3c27abace5bccf49e2694afca4d603c87e4f562c110f6d9e74aa0071bb86758b" exitCode=0 Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.475305 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" event={"ID":"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741","Type":"ContainerDied","Data":"3c27abace5bccf49e2694afca4d603c87e4f562c110f6d9e74aa0071bb86758b"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.492840 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b","Type":"ContainerStarted","Data":"e8e3333a36f5a934cde3c4d736190e1e93f2e61daae05c9ff417a0cd2cc1992e"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.495316 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jd55f" event={"ID":"a162a130-6094-42c0-a3d1-489de4a7fac4","Type":"ContainerStarted","Data":"b02abe0cf180261d33ee152c595a6cf6f2ff7aefee45a2bee135115f1fd70d75"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.497128 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1a2d18a4-7c93-4743-8f3d-3367a4dd937a","Type":"ContainerStarted","Data":"42556e4f4a768650188a7d329d2705c6874aa8e199c5e8cefe286da0d4b4960c"} Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.607417 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tpw6x"] Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.611443 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-tpw6x"] Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.871459 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 07 19:33:05 crc kubenswrapper[4813]: W1007 19:33:05.993600 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6f3d5f7_8af0_4f42_ae53_bc7473860346.slice/crio-51dc35c7fa23d6b9476c1e1cd62f1e239f319b72fcf2eba650a808b6caac9cf5 WatchSource:0}: Error finding container 51dc35c7fa23d6b9476c1e1cd62f1e239f319b72fcf2eba650a808b6caac9cf5: Status 404 returned error can't find the container with id 51dc35c7fa23d6b9476c1e1cd62f1e239f319b72fcf2eba650a808b6caac9cf5 Oct 07 19:33:05 crc kubenswrapper[4813]: I1007 19:33:05.999258 4813 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.038237 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 07 19:33:06 crc kubenswrapper[4813]: W1007 19:33:06.152164 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55f9cac2_ed84_40f8_8bca_f10c774814f7.slice/crio-1e7122f58d005cada2d66e3c1b0d942d9c9225e2cb73b165671b61a559d073d1 WatchSource:0}: Error finding container 1e7122f58d005cada2d66e3c1b0d942d9c9225e2cb73b165671b61a559d073d1: Status 404 returned error can't find the container with id 1e7122f58d005cada2d66e3c1b0d942d9c9225e2cb73b165671b61a559d073d1 Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.172748 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-config\") pod \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.172932 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-dns-svc\") pod \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.172952 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl62z\" (UniqueName: \"kubernetes.io/projected/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-kube-api-access-nl62z\") pod \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\" (UID: \"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741\") " Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.178170 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-kube-api-access-nl62z" (OuterVolumeSpecName: "kube-api-access-nl62z") pod "6a9bb4f0-e3d8-4b42-8d2a-e8645e401741" (UID: "6a9bb4f0-e3d8-4b42-8d2a-e8645e401741"). InnerVolumeSpecName "kube-api-access-nl62z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.190825 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6a9bb4f0-e3d8-4b42-8d2a-e8645e401741" (UID: "6a9bb4f0-e3d8-4b42-8d2a-e8645e401741"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.192012 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-config" (OuterVolumeSpecName: "config") pod "6a9bb4f0-e3d8-4b42-8d2a-e8645e401741" (UID: "6a9bb4f0-e3d8-4b42-8d2a-e8645e401741"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.274818 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.274864 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl62z\" (UniqueName: \"kubernetes.io/projected/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-kube-api-access-nl62z\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.274877 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.506148 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-k4522" event={"ID":"e55b2830-690d-4fcb-ab00-e619a514aa89","Type":"ContainerStarted","Data":"948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6"} Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.506276 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.511619 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" event={"ID":"9f1866f9-c802-4561-b716-040250f6dbc7","Type":"ContainerStarted","Data":"33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324"} Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.511678 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.512866 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"55f9cac2-ed84-40f8-8bca-f10c774814f7","Type":"ContainerStarted","Data":"1e7122f58d005cada2d66e3c1b0d942d9c9225e2cb73b165671b61a559d073d1"} Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.514677 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" event={"ID":"6a9bb4f0-e3d8-4b42-8d2a-e8645e401741","Type":"ContainerDied","Data":"b39059be4c298602def63e910590a9330a46d5097367d0ff8acb3d28b2e7074d"} Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.514716 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-nlrjh" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.514728 4813 scope.go:117] "RemoveContainer" containerID="3c27abace5bccf49e2694afca4d603c87e4f562c110f6d9e74aa0071bb86758b" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.516449 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b6f3d5f7-8af0-4f42-ae53-bc7473860346","Type":"ContainerStarted","Data":"51dc35c7fa23d6b9476c1e1cd62f1e239f319b72fcf2eba650a808b6caac9cf5"} Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.524086 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-k4522" podStartSLOduration=4.124370641 podStartE2EDuration="23.524066613s" podCreationTimestamp="2025-10-07 19:32:43 +0000 UTC" firstStartedPulling="2025-10-07 19:32:44.831056723 +0000 UTC m=+890.909312334" lastFinishedPulling="2025-10-07 19:33:04.230752695 +0000 UTC m=+910.309008306" observedRunningTime="2025-10-07 19:33:06.52141126 +0000 UTC m=+912.599666871" watchObservedRunningTime="2025-10-07 19:33:06.524066613 +0000 UTC m=+912.602322254" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.547435 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" podStartSLOduration=3.636926096 podStartE2EDuration="22.547392301s" podCreationTimestamp="2025-10-07 19:32:44 +0000 UTC" firstStartedPulling="2025-10-07 19:32:45.383555957 +0000 UTC m=+891.461811568" lastFinishedPulling="2025-10-07 19:33:04.294022162 +0000 UTC m=+910.372277773" observedRunningTime="2025-10-07 19:33:06.543146193 +0000 UTC m=+912.621401804" watchObservedRunningTime="2025-10-07 19:33:06.547392301 +0000 UTC m=+912.625647902" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.617389 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f19523b-9140-47af-b6cf-ed858867ed62" path="/var/lib/kubelet/pods/4f19523b-9140-47af-b6cf-ed858867ed62/volumes" Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.617705 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-nlrjh"] Oct 07 19:33:06 crc kubenswrapper[4813]: I1007 19:33:06.617729 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-nlrjh"] Oct 07 19:33:08 crc kubenswrapper[4813]: I1007 19:33:08.617635 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a9bb4f0-e3d8-4b42-8d2a-e8645e401741" path="/var/lib/kubelet/pods/6a9bb4f0-e3d8-4b42-8d2a-e8645e401741/volumes" Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.565531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1a2d18a4-7c93-4743-8f3d-3367a4dd937a","Type":"ContainerStarted","Data":"00778b7e328e4e6c82a5f86eccaa6675028bc121fd2d7b9477bddd70b32555d2"} Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.570261 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9e7e2083-472f-4551-840c-35943117bcb0","Type":"ContainerStarted","Data":"49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777"} Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.570983 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.576141 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"55f9cac2-ed84-40f8-8bca-f10c774814f7","Type":"ContainerStarted","Data":"a1040b8967fdf172cb311885aacedeecdedf37cfd6d46f415c3ab6f6275fde6a"} Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.577843 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b","Type":"ContainerStarted","Data":"4f72846d865fc6afc88583d4dd43820d6fc867b55d72e58d39089d48269be528"} Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.579167 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"d330e133-a612-477b-afbd-2af06b9e084d","Type":"ContainerStarted","Data":"e7d6c5ff1042cb25029f4c4db76c31268a5858149bc2f46d6170a008856e9ea0"} Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.579218 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.580772 4813 generic.go:334] "Generic (PLEG): container finished" podID="47f8d464-3eaa-4ee5-ae74-c6339710ade0" containerID="55220a166167cf11f1204821a687d9de9f5b104bd10b62d46fdd46b1ed0c3ae6" exitCode=0 Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.580830 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dmq4j" event={"ID":"47f8d464-3eaa-4ee5-ae74-c6339710ade0","Type":"ContainerDied","Data":"55220a166167cf11f1204821a687d9de9f5b104bd10b62d46fdd46b1ed0c3ae6"} Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.582649 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jd55f" event={"ID":"a162a130-6094-42c0-a3d1-489de4a7fac4","Type":"ContainerStarted","Data":"981065f2e93e9f1f40f6bafe678a52a39a9501513959021a9ad9125e664f81f1"} Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.582698 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-jd55f" Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.584207 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b6f3d5f7-8af0-4f42-ae53-bc7473860346","Type":"ContainerStarted","Data":"dff6b51f953128a302cb3a7b8c2f207da9c7fe63636c255929ea32bef82707ed"} Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.642211 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=16.841624594 podStartE2EDuration="24.642191322s" podCreationTimestamp="2025-10-07 19:32:49 +0000 UTC" firstStartedPulling="2025-10-07 19:33:05.016196605 +0000 UTC m=+911.094452216" lastFinishedPulling="2025-10-07 19:33:12.816763333 +0000 UTC m=+918.895018944" observedRunningTime="2025-10-07 19:33:13.641649177 +0000 UTC m=+919.719904788" watchObservedRunningTime="2025-10-07 19:33:13.642191322 +0000 UTC m=+919.720446933" Oct 07 19:33:13 crc kubenswrapper[4813]: I1007 19:33:13.696156 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jd55f" podStartSLOduration=12.35255212 podStartE2EDuration="19.69614298s" podCreationTimestamp="2025-10-07 19:32:54 +0000 UTC" firstStartedPulling="2025-10-07 19:33:04.82263123 +0000 UTC m=+910.900886841" lastFinishedPulling="2025-10-07 19:33:12.1662221 +0000 UTC m=+918.244477701" observedRunningTime="2025-10-07 19:33:13.691609675 +0000 UTC m=+919.769865286" watchObservedRunningTime="2025-10-07 19:33:13.69614298 +0000 UTC m=+919.774398581" Oct 07 19:33:13 crc 
kubenswrapper[4813]: I1007 19:33:13.711711 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=18.811997528 podStartE2EDuration="25.711695072s" podCreationTimestamp="2025-10-07 19:32:48 +0000 UTC" firstStartedPulling="2025-10-07 19:33:04.627907863 +0000 UTC m=+910.706163464" lastFinishedPulling="2025-10-07 19:33:11.527605397 +0000 UTC m=+917.605861008" observedRunningTime="2025-10-07 19:33:13.708485653 +0000 UTC m=+919.786741254" watchObservedRunningTime="2025-10-07 19:33:13.711695072 +0000 UTC m=+919.789950673" Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.053821 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.469458 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.532572 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-k4522"] Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.615937 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-k4522" podUID="e55b2830-690d-4fcb-ab00-e619a514aa89" containerName="dnsmasq-dns" containerID="cri-o://948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6" gracePeriod=10 Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.616798 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dmq4j" event={"ID":"47f8d464-3eaa-4ee5-ae74-c6339710ade0","Type":"ContainerStarted","Data":"a798d126a6a72d4afd4d452805a64f6ccb9c04856da14d0cc898e093fbb94006"} Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.616830 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.616841 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-dmq4j" event={"ID":"47f8d464-3eaa-4ee5-ae74-c6339710ade0","Type":"ContainerStarted","Data":"42aad5c1479edb5e0d5ecfa10b096acfaade4ddf0cabe674ac91a14c8990fb88"} Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.618925 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-dmq4j" Oct 07 19:33:14 crc kubenswrapper[4813]: I1007 19:33:14.689132 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-dmq4j" podStartSLOduration=14.319772815 podStartE2EDuration="20.689114202s" podCreationTimestamp="2025-10-07 19:32:54 +0000 UTC" firstStartedPulling="2025-10-07 19:33:05.247516268 +0000 UTC m=+911.325771879" lastFinishedPulling="2025-10-07 19:33:11.616857655 +0000 UTC m=+917.695113266" observedRunningTime="2025-10-07 19:33:14.686590622 +0000 UTC m=+920.764846233" watchObservedRunningTime="2025-10-07 19:33:14.689114202 +0000 UTC m=+920.767369813" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.141735 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.251887 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-dns-svc\") pod \"e55b2830-690d-4fcb-ab00-e619a514aa89\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.251930 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-config\") pod \"e55b2830-690d-4fcb-ab00-e619a514aa89\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.251963 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnjsg\" (UniqueName: \"kubernetes.io/projected/e55b2830-690d-4fcb-ab00-e619a514aa89-kube-api-access-pnjsg\") pod \"e55b2830-690d-4fcb-ab00-e619a514aa89\" (UID: \"e55b2830-690d-4fcb-ab00-e619a514aa89\") " Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.276468 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e55b2830-690d-4fcb-ab00-e619a514aa89-kube-api-access-pnjsg" (OuterVolumeSpecName: "kube-api-access-pnjsg") pod "e55b2830-690d-4fcb-ab00-e619a514aa89" (UID: "e55b2830-690d-4fcb-ab00-e619a514aa89"). InnerVolumeSpecName "kube-api-access-pnjsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.292922 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-config" (OuterVolumeSpecName: "config") pod "e55b2830-690d-4fcb-ab00-e619a514aa89" (UID: "e55b2830-690d-4fcb-ab00-e619a514aa89"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.311661 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e55b2830-690d-4fcb-ab00-e619a514aa89" (UID: "e55b2830-690d-4fcb-ab00-e619a514aa89"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.355801 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.355834 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e55b2830-690d-4fcb-ab00-e619a514aa89-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.355870 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnjsg\" (UniqueName: \"kubernetes.io/projected/e55b2830-690d-4fcb-ab00-e619a514aa89-kube-api-access-pnjsg\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.627105 4813 generic.go:334] "Generic (PLEG): container finished" podID="e55b2830-690d-4fcb-ab00-e619a514aa89" containerID="948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6" exitCode=0 Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.627145 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-k4522" event={"ID":"e55b2830-690d-4fcb-ab00-e619a514aa89","Type":"ContainerDied","Data":"948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6"} Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.627185 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-k4522" event={"ID":"e55b2830-690d-4fcb-ab00-e619a514aa89","Type":"ContainerDied","Data":"847bef0ff7241c42dd04d49bc0b8bba6a8ba06ed72b0aa6ab086090437081eff"} Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.627194 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-k4522" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.627206 4813 scope.go:117] "RemoveContainer" containerID="948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.655266 4813 scope.go:117] "RemoveContainer" containerID="9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.662889 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-k4522"] Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.673922 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-k4522"] Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.674635 4813 scope.go:117] "RemoveContainer" containerID="948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6" Oct 07 19:33:15 crc kubenswrapper[4813]: E1007 19:33:15.675022 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6\": container with ID starting with 948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6 not found: ID does not exist" containerID="948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.675050 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6"} err="failed to get container status \"948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6\": rpc error: code = NotFound desc = could not find container \"948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6\": container with ID starting with 948948e3426dd74ab4f98880e70fd41c784ffdccfce1caf004739e8562044be6 not found: ID does not exist" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.675069 4813 scope.go:117] "RemoveContainer" containerID="9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af" Oct 07 19:33:15 crc kubenswrapper[4813]: E1007 19:33:15.675406 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af\": container with ID starting with 9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af not found: ID does not exist" containerID="9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af" Oct 07 19:33:15 crc kubenswrapper[4813]: I1007 19:33:15.675431 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af"} err="failed to get container status \"9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af\": rpc error: code = NotFound desc = could not find container \"9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af\": container with ID starting with 9831b99bf0f534db54a4126acfd668bfad9753f02dca419d244600b8b78897af not found: ID does not exist" Oct 07 19:33:16 crc kubenswrapper[4813]: I1007 19:33:16.619050 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e55b2830-690d-4fcb-ab00-e619a514aa89" path="/var/lib/kubelet/pods/e55b2830-690d-4fcb-ab00-e619a514aa89/volumes" Oct 07 19:33:17 crc kubenswrapper[4813]: I1007 19:33:17.654748 4813 
generic.go:334] "Generic (PLEG): container finished" podID="1a2d18a4-7c93-4743-8f3d-3367a4dd937a" containerID="00778b7e328e4e6c82a5f86eccaa6675028bc121fd2d7b9477bddd70b32555d2" exitCode=0 Oct 07 19:33:17 crc kubenswrapper[4813]: I1007 19:33:17.655160 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1a2d18a4-7c93-4743-8f3d-3367a4dd937a","Type":"ContainerDied","Data":"00778b7e328e4e6c82a5f86eccaa6675028bc121fd2d7b9477bddd70b32555d2"} Oct 07 19:33:17 crc kubenswrapper[4813]: I1007 19:33:17.657911 4813 generic.go:334] "Generic (PLEG): container finished" podID="93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b" containerID="4f72846d865fc6afc88583d4dd43820d6fc867b55d72e58d39089d48269be528" exitCode=0 Oct 07 19:33:17 crc kubenswrapper[4813]: I1007 19:33:17.657946 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b","Type":"ContainerDied","Data":"4f72846d865fc6afc88583d4dd43820d6fc867b55d72e58d39089d48269be528"} Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.447648 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.668084 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"55f9cac2-ed84-40f8-8bca-f10c774814f7","Type":"ContainerStarted","Data":"e8082d284405ada7216db0cee3ecd0ad8d343d6ce5cff769fb8c1d3d7c6f9f4a"} Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.669864 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b","Type":"ContainerStarted","Data":"e934de9a2217423e94559ebfd043c64a9a2e6cc932138ec4fa5b64f23a66fa29"} Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.671904 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b6f3d5f7-8af0-4f42-ae53-bc7473860346","Type":"ContainerStarted","Data":"6a89452850a8c91d083debdc80f3de6afb11ce1fc6bb3d102899772fccff21bc"} Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.673963 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1a2d18a4-7c93-4743-8f3d-3367a4dd937a","Type":"ContainerStarted","Data":"c86f4b423e018d346f8d92b12029a9b66742aea64e4f9aac4fc2136f57e22936"} Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.675498 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49","Type":"ContainerStarted","Data":"50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646"} Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.677062 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b693f559-87e2-41ef-94c0-56d76bd9ef00","Type":"ContainerStarted","Data":"98789f437cadf7459a98615391b7fd39c5b25d988dcb21b731b947d853a5d811"} Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.698543 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=11.359767359 podStartE2EDuration="22.698526822s" podCreationTimestamp="2025-10-07 19:32:56 +0000 UTC" firstStartedPulling="2025-10-07 19:33:06.15551355 +0000 UTC m=+912.233769161" lastFinishedPulling="2025-10-07 19:33:17.494272973 +0000 UTC m=+923.572528624" observedRunningTime="2025-10-07 19:33:18.693562064 
+0000 UTC m=+924.771817675" watchObservedRunningTime="2025-10-07 19:33:18.698526822 +0000 UTC m=+924.776782423" Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.752909 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=26.201972965 podStartE2EDuration="33.752890831s" podCreationTimestamp="2025-10-07 19:32:45 +0000 UTC" firstStartedPulling="2025-10-07 19:33:04.44520453 +0000 UTC m=+910.523460151" lastFinishedPulling="2025-10-07 19:33:11.996122416 +0000 UTC m=+918.074378017" observedRunningTime="2025-10-07 19:33:18.748619143 +0000 UTC m=+924.826874744" watchObservedRunningTime="2025-10-07 19:33:18.752890831 +0000 UTC m=+924.831146442" Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.792833 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=14.311160549 podStartE2EDuration="25.7928159s" podCreationTimestamp="2025-10-07 19:32:53 +0000 UTC" firstStartedPulling="2025-10-07 19:33:06.019701269 +0000 UTC m=+912.097956880" lastFinishedPulling="2025-10-07 19:33:17.50135662 +0000 UTC m=+923.579612231" observedRunningTime="2025-10-07 19:33:18.789871818 +0000 UTC m=+924.868127439" watchObservedRunningTime="2025-10-07 19:33:18.7928159 +0000 UTC m=+924.871071511" Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.815582 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=25.445281841 podStartE2EDuration="32.815566132s" podCreationTimestamp="2025-10-07 19:32:46 +0000 UTC" firstStartedPulling="2025-10-07 19:33:04.62598774 +0000 UTC m=+910.704243351" lastFinishedPulling="2025-10-07 19:33:11.996272031 +0000 UTC m=+918.074527642" observedRunningTime="2025-10-07 19:33:18.809229596 +0000 UTC m=+924.887485207" watchObservedRunningTime="2025-10-07 19:33:18.815566132 +0000 UTC m=+924.893821743" Oct 07 19:33:18 crc kubenswrapper[4813]: I1007 19:33:18.992190 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.040570 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.094837 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.130117 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.682919 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.682980 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.724935 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.729952 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.980294 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-9v9ww"] Oct 07 19:33:19 crc kubenswrapper[4813]: E1007 19:33:19.980589 4813 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e55b2830-690d-4fcb-ab00-e619a514aa89" containerName="dnsmasq-dns" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.980606 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55b2830-690d-4fcb-ab00-e619a514aa89" containerName="dnsmasq-dns" Oct 07 19:33:19 crc kubenswrapper[4813]: E1007 19:33:19.980620 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e55b2830-690d-4fcb-ab00-e619a514aa89" containerName="init" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.980628 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55b2830-690d-4fcb-ab00-e619a514aa89" containerName="init" Oct 07 19:33:19 crc kubenswrapper[4813]: E1007 19:33:19.980657 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a9bb4f0-e3d8-4b42-8d2a-e8645e401741" containerName="init" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.980665 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a9bb4f0-e3d8-4b42-8d2a-e8645e401741" containerName="init" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.980805 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="e55b2830-690d-4fcb-ab00-e619a514aa89" containerName="dnsmasq-dns" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.980826 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a9bb4f0-e3d8-4b42-8d2a-e8645e401741" containerName="init" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.981617 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:19 crc kubenswrapper[4813]: I1007 19:33:19.989216 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.024121 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-9v9ww"] Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.164035 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs26r\" (UniqueName: \"kubernetes.io/projected/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-kube-api-access-qs26r\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.164387 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.164501 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.164605 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-config\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " 
pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.267006 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.267294 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.267396 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-config\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.267441 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs26r\" (UniqueName: \"kubernetes.io/projected/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-kube-api-access-qs26r\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.268412 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-config\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.268648 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.269016 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.309693 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-ww8fx"] Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.314342 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.320552 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.346947 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ww8fx"] Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.348493 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs26r\" (UniqueName: \"kubernetes.io/projected/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-kube-api-access-qs26r\") pod \"dnsmasq-dns-7fd796d7df-9v9ww\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.378296 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.471724 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-ovn-rundir\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.471788 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.471821 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-config\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.471870 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc5z4\" (UniqueName: \"kubernetes.io/projected/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-kube-api-access-nc5z4\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.471894 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-combined-ca-bundle\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.471925 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-ovs-rundir\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.518283 4813 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ovn-northd-0"] Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.519510 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.534623 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.536142 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.536376 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.538566 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-2hpsz" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.573045 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-config\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.573317 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc5z4\" (UniqueName: \"kubernetes.io/projected/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-kube-api-access-nc5z4\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.573358 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-combined-ca-bundle\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.573390 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-ovs-rundir\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.573440 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-ovn-rundir\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.573475 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.575153 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-ovs-rundir\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " 
pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.575492 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-ovn-rundir\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.575502 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-config\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.580870 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.592349 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-9v9ww"] Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.592852 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.619259 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-combined-ca-bundle\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.633747 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc5z4\" (UniqueName: \"kubernetes.io/projected/1276e9fd-662d-41f1-8c9d-05abbbfbf0a2-kube-api-access-nc5z4\") pod \"ovn-controller-metrics-ww8fx\" (UID: \"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2\") " pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.661010 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.672841 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ww8fx" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.676004 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.676083 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.676171 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-scripts\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.676247 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czbmq\" (UniqueName: \"kubernetes.io/projected/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-kube-api-access-czbmq\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.676268 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-v5tcc"] Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.676409 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.685038 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.685186 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-config\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.686661 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.692041 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-v5tcc"] Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788240 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788286 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788335 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-scripts\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788364 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czbmq\" (UniqueName: \"kubernetes.io/projected/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-kube-api-access-czbmq\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788385 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788404 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788497 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-config\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788556 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788574 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " 
pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788590 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-config\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.788610 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4z4x\" (UniqueName: \"kubernetes.io/projected/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-kube-api-access-v4z4x\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.790603 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-config\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.792063 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-scripts\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.792432 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.796701 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.796989 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.797235 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.848308 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czbmq\" (UniqueName: \"kubernetes.io/projected/d09b3567-cc2a-48cc-b1ea-b0c65fee032d-kube-api-access-czbmq\") pod \"ovn-northd-0\" (UID: \"d09b3567-cc2a-48cc-b1ea-b0c65fee032d\") " pod="openstack/ovn-northd-0" Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.886723 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-v5tcc"] Oct 07 19:33:20 crc kubenswrapper[4813]: E1007 19:33:20.887271 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted 
volumes=[config dns-svc kube-api-access-v4z4x ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc" podUID="55ae9a8b-228e-445e-9bbe-2b6a80d2f361"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.901219 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-config\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.901316 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.901357 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.901404 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4z4x\" (UniqueName: \"kubernetes.io/projected/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-kube-api-access-v4z4x\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.902775 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-config\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.903672 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.903866 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.910199 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-qmzgx"]
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.914633 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.923176 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.946802 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4z4x\" (UniqueName: \"kubernetes.io/projected/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-kube-api-access-v4z4x\") pod \"dnsmasq-dns-74f6f696b9-v5tcc\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") " pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:20 crc kubenswrapper[4813]: I1007 19:33:20.954173 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qmzgx"]
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.005427 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.005499 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.005537 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-dns-svc\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.005583 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-config\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.005618 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfv69\" (UniqueName: \"kubernetes.io/projected/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-kube-api-access-qfv69\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.106938 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-config\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.107194 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfv69\" (UniqueName: \"kubernetes.io/projected/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-kube-api-access-qfv69\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.107223 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.107265 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.107339 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-dns-svc\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.108074 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-dns-svc\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.108820 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.108975 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-config\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.109285 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.128924 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfv69\" (UniqueName: \"kubernetes.io/projected/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-kube-api-access-qfv69\") pod \"dnsmasq-dns-698758b865-qmzgx\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.132489 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.253053 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-qmzgx"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.332216 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-9v9ww"]
Oct 07 19:33:21 crc kubenswrapper[4813]: W1007 19:33:21.346661 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6ba585c_1da9_427e_b7ad_9a5e5a697a76.slice/crio-11f24f03048a4c5029c3e4773c90eb9816a4329bd6b480b4de809cb2fd5fc750 WatchSource:0}: Error finding container 11f24f03048a4c5029c3e4773c90eb9816a4329bd6b480b4de809cb2fd5fc750: Status 404 returned error can't find the container with id 11f24f03048a4c5029c3e4773c90eb9816a4329bd6b480b4de809cb2fd5fc750
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.488471 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ww8fx"]
Oct 07 19:33:21 crc kubenswrapper[4813]: W1007 19:33:21.505657 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1276e9fd_662d_41f1_8c9d_05abbbfbf0a2.slice/crio-5fda15c5d8df1e6d0f6f68b705ffc8870f97f143af5ffadb6a678699d654d0bf WatchSource:0}: Error finding container 5fda15c5d8df1e6d0f6f68b705ffc8870f97f143af5ffadb6a678699d654d0bf: Status 404 returned error can't find the container with id 5fda15c5d8df1e6d0f6f68b705ffc8870f97f143af5ffadb6a678699d654d0bf
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.568713 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.731090 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.736954 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.741272 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.758841 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.758873 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.758947 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-xhfvl"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.761197 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.796580 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" event={"ID":"b6ba585c-1da9-427e-b7ad-9a5e5a697a76","Type":"ContainerStarted","Data":"11f24f03048a4c5029c3e4773c90eb9816a4329bd6b480b4de809cb2fd5fc750"}
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.799247 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d09b3567-cc2a-48cc-b1ea-b0c65fee032d","Type":"ContainerStarted","Data":"986ffc6b868535b93f18bfdc89b10cd96193e8ec6c639500402c6654edb2c092"}
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.800824 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ww8fx" event={"ID":"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2","Type":"ContainerStarted","Data":"5fda15c5d8df1e6d0f6f68b705ffc8870f97f143af5ffadb6a678699d654d0bf"}
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.800887 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.810622 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.823284 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kznq2\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-kube-api-access-kznq2\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.823394 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.823446 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bd53a283-8633-435c-a910-ab9abccb5c0d-cache\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.823483 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.823555 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bd53a283-8633-435c-a910-ab9abccb5c0d-lock\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.861476 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qmzgx"]
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925094 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-config\") pod \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") "
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925181 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4z4x\" (UniqueName: \"kubernetes.io/projected/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-kube-api-access-v4z4x\") pod \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") "
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925259 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-ovsdbserver-nb\") pod \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") "
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925359 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-dns-svc\") pod \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\" (UID: \"55ae9a8b-228e-445e-9bbe-2b6a80d2f361\") "
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925619 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bd53a283-8633-435c-a910-ab9abccb5c0d-lock\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925696 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kznq2\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-kube-api-access-kznq2\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925735 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925824 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bd53a283-8633-435c-a910-ab9abccb5c0d-cache\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925883 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.926092 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bd53a283-8633-435c-a910-ab9abccb5c0d-lock\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.926124 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.926207 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bd53a283-8633-435c-a910-ab9abccb5c0d-cache\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0"
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925723 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-config" (OuterVolumeSpecName: "config") pod "55ae9a8b-228e-445e-9bbe-2b6a80d2f361" (UID: "55ae9a8b-228e-445e-9bbe-2b6a80d2f361"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925791 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "55ae9a8b-228e-445e-9bbe-2b6a80d2f361" (UID: "55ae9a8b-228e-445e-9bbe-2b6a80d2f361"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.925950 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "55ae9a8b-228e-445e-9bbe-2b6a80d2f361" (UID: "55ae9a8b-228e-445e-9bbe-2b6a80d2f361"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:33:21 crc kubenswrapper[4813]: E1007 19:33:21.926966 4813 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 07 19:33:21 crc kubenswrapper[4813]: E1007 19:33:21.926981 4813 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 07 19:33:21 crc kubenswrapper[4813]: E1007 19:33:21.927017 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift podName:bd53a283-8633-435c-a910-ab9abccb5c0d nodeName:}" failed. No retries permitted until 2025-10-07 19:33:22.427002907 +0000 UTC m=+928.505258518 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift") pod "swift-storage-0" (UID: "bd53a283-8633-435c-a910-ab9abccb5c0d") : configmap "swift-ring-files" not found
Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.930112 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-kube-api-access-v4z4x" (OuterVolumeSpecName: "kube-api-access-v4z4x") pod "55ae9a8b-228e-445e-9bbe-2b6a80d2f361" (UID: "55ae9a8b-228e-445e-9bbe-2b6a80d2f361"). InnerVolumeSpecName "kube-api-access-v4z4x". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.946931 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kznq2\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-kube-api-access-kznq2\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0" Oct 07 19:33:21 crc kubenswrapper[4813]: I1007 19:33:21.956800 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0" Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.027280 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.027506 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.027572 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4z4x\" (UniqueName: \"kubernetes.io/projected/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-kube-api-access-v4z4x\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.027628 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/55ae9a8b-228e-445e-9bbe-2b6a80d2f361-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.078353 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.078413 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.433527 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0" Oct 07 19:33:22 crc kubenswrapper[4813]: E1007 19:33:22.434021 4813 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 19:33:22 crc kubenswrapper[4813]: E1007 19:33:22.434072 4813 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 19:33:22 crc kubenswrapper[4813]: E1007 19:33:22.434154 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift podName:bd53a283-8633-435c-a910-ab9abccb5c0d nodeName:}" 
failed. No retries permitted until 2025-10-07 19:33:23.434130438 +0000 UTC m=+929.512386079 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift") pod "swift-storage-0" (UID: "bd53a283-8633-435c-a910-ab9abccb5c0d") : configmap "swift-ring-files" not found Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.807856 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qmzgx" event={"ID":"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef","Type":"ContainerStarted","Data":"4dc48c9ad019d14a75a10529eb2c56289c3d223b70694f30a28c2a37b21e0512"} Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.808616 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-v5tcc" Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.875866 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-v5tcc"] Oct 07 19:33:22 crc kubenswrapper[4813]: I1007 19:33:22.890915 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-v5tcc"] Oct 07 19:33:23 crc kubenswrapper[4813]: I1007 19:33:23.447553 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0" Oct 07 19:33:23 crc kubenswrapper[4813]: E1007 19:33:23.447841 4813 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 19:33:23 crc kubenswrapper[4813]: E1007 19:33:23.447863 4813 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 19:33:23 crc kubenswrapper[4813]: E1007 19:33:23.447908 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift podName:bd53a283-8633-435c-a910-ab9abccb5c0d nodeName:}" failed. No retries permitted until 2025-10-07 19:33:25.447893428 +0000 UTC m=+931.526149039 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift") pod "swift-storage-0" (UID: "bd53a283-8633-435c-a910-ab9abccb5c0d") : configmap "swift-ring-files" not found Oct 07 19:33:23 crc kubenswrapper[4813]: I1007 19:33:23.817154 4813 generic.go:334] "Generic (PLEG): container finished" podID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" containerID="c924b0d98cb3a02699a4f3bc9f32faf2ff2ade3efed234a48be9061cc5f9d438" exitCode=0 Oct 07 19:33:23 crc kubenswrapper[4813]: I1007 19:33:23.817224 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qmzgx" event={"ID":"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef","Type":"ContainerDied","Data":"c924b0d98cb3a02699a4f3bc9f32faf2ff2ade3efed234a48be9061cc5f9d438"} Oct 07 19:33:23 crc kubenswrapper[4813]: I1007 19:33:23.822131 4813 generic.go:334] "Generic (PLEG): container finished" podID="b6ba585c-1da9-427e-b7ad-9a5e5a697a76" containerID="40ea659c1c0f7fb48e8095952a0434f7602ea86c4aaa092ecc7c49b810d7f736" exitCode=0 Oct 07 19:33:23 crc kubenswrapper[4813]: I1007 19:33:23.822206 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" event={"ID":"b6ba585c-1da9-427e-b7ad-9a5e5a697a76","Type":"ContainerDied","Data":"40ea659c1c0f7fb48e8095952a0434f7602ea86c4aaa092ecc7c49b810d7f736"} Oct 07 19:33:23 crc kubenswrapper[4813]: I1007 19:33:23.847731 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ww8fx" event={"ID":"1276e9fd-662d-41f1-8c9d-05abbbfbf0a2","Type":"ContainerStarted","Data":"072078a8a98a0c70ea477ff1b47b909eb8f1b64d464df7d81908e1e5e99b1ff0"} Oct 07 19:33:23 crc kubenswrapper[4813]: I1007 19:33:23.897832 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-ww8fx" podStartSLOduration=3.89781463 podStartE2EDuration="3.89781463s" podCreationTimestamp="2025-10-07 19:33:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:33:23.89747383 +0000 UTC m=+929.975729441" watchObservedRunningTime="2025-10-07 19:33:23.89781463 +0000 UTC m=+929.976070241" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.189929 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.262621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-config\") pod \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.262943 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-dns-svc\") pod \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.263057 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs26r\" (UniqueName: \"kubernetes.io/projected/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-kube-api-access-qs26r\") pod \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.263225 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-ovsdbserver-nb\") pod \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\" (UID: \"b6ba585c-1da9-427e-b7ad-9a5e5a697a76\") " Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.267219 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-kube-api-access-qs26r" (OuterVolumeSpecName: "kube-api-access-qs26r") pod "b6ba585c-1da9-427e-b7ad-9a5e5a697a76" (UID: "b6ba585c-1da9-427e-b7ad-9a5e5a697a76"). InnerVolumeSpecName "kube-api-access-qs26r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.281127 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b6ba585c-1da9-427e-b7ad-9a5e5a697a76" (UID: "b6ba585c-1da9-427e-b7ad-9a5e5a697a76"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.283966 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b6ba585c-1da9-427e-b7ad-9a5e5a697a76" (UID: "b6ba585c-1da9-427e-b7ad-9a5e5a697a76"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.284724 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-config" (OuterVolumeSpecName: "config") pod "b6ba585c-1da9-427e-b7ad-9a5e5a697a76" (UID: "b6ba585c-1da9-427e-b7ad-9a5e5a697a76"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.365623 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.365661 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.365674 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.365687 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs26r\" (UniqueName: \"kubernetes.io/projected/b6ba585c-1da9-427e-b7ad-9a5e5a697a76-kube-api-access-qs26r\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.634402 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55ae9a8b-228e-445e-9bbe-2b6a80d2f361" path="/var/lib/kubelet/pods/55ae9a8b-228e-445e-9bbe-2b6a80d2f361/volumes" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.858883 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d09b3567-cc2a-48cc-b1ea-b0c65fee032d","Type":"ContainerStarted","Data":"7193745f66f15228f387c04d71da278ded111c012fcbbdbad4d7a5a5811bb16e"} Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.861998 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qmzgx" event={"ID":"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef","Type":"ContainerStarted","Data":"751ab3ea2587e47b4b3d8686529b1ee934773e5a56066fc6654e0dc9a240b617"} Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.862743 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-qmzgx" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.870020 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" event={"ID":"b6ba585c-1da9-427e-b7ad-9a5e5a697a76","Type":"ContainerDied","Data":"11f24f03048a4c5029c3e4773c90eb9816a4329bd6b480b4de809cb2fd5fc750"} Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.870058 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-9v9ww" Oct 07 19:33:24 crc kubenswrapper[4813]: I1007 19:33:24.870076 4813 scope.go:117] "RemoveContainer" containerID="40ea659c1c0f7fb48e8095952a0434f7602ea86c4aaa092ecc7c49b810d7f736" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.067655 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-qmzgx" podStartSLOduration=5.067637483 podStartE2EDuration="5.067637483s" podCreationTimestamp="2025-10-07 19:33:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:33:24.89863182 +0000 UTC m=+930.976887431" watchObservedRunningTime="2025-10-07 19:33:25.067637483 +0000 UTC m=+931.145893094" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.096387 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-9v9ww"] Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.102859 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-9v9ww"] Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.467307 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-bj76f"] Oct 07 19:33:25 crc kubenswrapper[4813]: E1007 19:33:25.467816 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ba585c-1da9-427e-b7ad-9a5e5a697a76" containerName="init" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.467843 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ba585c-1da9-427e-b7ad-9a5e5a697a76" containerName="init" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.468158 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ba585c-1da9-427e-b7ad-9a5e5a697a76" containerName="init" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.469024 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.474155 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.474645 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.475726 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.476257 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-bj76f"] Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.494100 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0" Oct 07 19:33:25 crc kubenswrapper[4813]: E1007 19:33:25.494568 4813 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 19:33:25 crc kubenswrapper[4813]: E1007 19:33:25.494600 4813 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 19:33:25 crc kubenswrapper[4813]: E1007 19:33:25.494666 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift podName:bd53a283-8633-435c-a910-ab9abccb5c0d nodeName:}" failed. No retries permitted until 2025-10-07 19:33:29.494643199 +0000 UTC m=+935.572898840 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift") pod "swift-storage-0" (UID: "bd53a283-8633-435c-a910-ab9abccb5c0d") : configmap "swift-ring-files" not found Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.595667 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8cee7433-9535-4db0-aa37-e8fc28bdbf94-etc-swift\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.595766 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-swiftconf\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.595787 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76xnq\" (UniqueName: \"kubernetes.io/projected/8cee7433-9535-4db0-aa37-e8fc28bdbf94-kube-api-access-76xnq\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.595807 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-ring-data-devices\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.595847 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-scripts\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.595888 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-dispersionconf\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.595904 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-combined-ca-bundle\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.697506 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-swiftconf\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.697561 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-76xnq\" (UniqueName: \"kubernetes.io/projected/8cee7433-9535-4db0-aa37-e8fc28bdbf94-kube-api-access-76xnq\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.697588 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-ring-data-devices\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.697660 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-scripts\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.697724 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-dispersionconf\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.697746 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-combined-ca-bundle\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.697784 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8cee7433-9535-4db0-aa37-e8fc28bdbf94-etc-swift\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.698659 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8cee7433-9535-4db0-aa37-e8fc28bdbf94-etc-swift\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.699018 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-ring-data-devices\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.699367 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-scripts\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.708818 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-combined-ca-bundle\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.709061 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-swiftconf\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.712264 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-dispersionconf\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.725084 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76xnq\" (UniqueName: \"kubernetes.io/projected/8cee7433-9535-4db0-aa37-e8fc28bdbf94-kube-api-access-76xnq\") pod \"swift-ring-rebalance-bj76f\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") " pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.790189 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-bj76f" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.890019 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"d09b3567-cc2a-48cc-b1ea-b0c65fee032d","Type":"ContainerStarted","Data":"ca0fdc72227422fa075d6c943dfa52ad1249074c2cc4a1614ce73b021783977a"} Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.890375 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 07 19:33:25 crc kubenswrapper[4813]: I1007 19:33:25.922057 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.84580733 podStartE2EDuration="5.922036117s" podCreationTimestamp="2025-10-07 19:33:20 +0000 UTC" firstStartedPulling="2025-10-07 19:33:21.580224728 +0000 UTC m=+927.658480329" lastFinishedPulling="2025-10-07 19:33:24.656453515 +0000 UTC m=+930.734709116" observedRunningTime="2025-10-07 19:33:25.911392291 +0000 UTC m=+931.989647902" watchObservedRunningTime="2025-10-07 19:33:25.922036117 +0000 UTC m=+932.000291728" Oct 07 19:33:26 crc kubenswrapper[4813]: I1007 19:33:26.204283 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-bj76f"] Oct 07 19:33:26 crc kubenswrapper[4813]: I1007 19:33:26.622681 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6ba585c-1da9-427e-b7ad-9a5e5a697a76" path="/var/lib/kubelet/pods/b6ba585c-1da9-427e-b7ad-9a5e5a697a76/volumes" Oct 07 19:33:26 crc kubenswrapper[4813]: I1007 19:33:26.900597 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-bj76f" event={"ID":"8cee7433-9535-4db0-aa37-e8fc28bdbf94","Type":"ContainerStarted","Data":"704af3fa40c7a9da590127618d5b0af8a4a11b0419d10f6d7b5ac2d79692402d"} Oct 07 19:33:27 crc kubenswrapper[4813]: I1007 19:33:27.536998 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 07 19:33:27 crc kubenswrapper[4813]: I1007 19:33:27.537046 4813 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 07 19:33:27 crc kubenswrapper[4813]: I1007 19:33:27.594063 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 07 19:33:27 crc kubenswrapper[4813]: I1007 19:33:27.979485 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.319928 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-4g6mq"] Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.321922 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4g6mq" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.330097 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-4g6mq"] Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.340505 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8hjr\" (UniqueName: \"kubernetes.io/projected/99efc153-28b9-4f27-86a3-b8a913bc66df-kube-api-access-l8hjr\") pod \"placement-db-create-4g6mq\" (UID: \"99efc153-28b9-4f27-86a3-b8a913bc66df\") " pod="openstack/placement-db-create-4g6mq" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.351838 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.351899 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.415215 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.442207 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8hjr\" (UniqueName: \"kubernetes.io/projected/99efc153-28b9-4f27-86a3-b8a913bc66df-kube-api-access-l8hjr\") pod \"placement-db-create-4g6mq\" (UID: \"99efc153-28b9-4f27-86a3-b8a913bc66df\") " pod="openstack/placement-db-create-4g6mq" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.461297 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8hjr\" (UniqueName: \"kubernetes.io/projected/99efc153-28b9-4f27-86a3-b8a913bc66df-kube-api-access-l8hjr\") pod \"placement-db-create-4g6mq\" (UID: \"99efc153-28b9-4f27-86a3-b8a913bc66df\") " pod="openstack/placement-db-create-4g6mq" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.658345 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4g6mq" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.671615 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-sbds4"] Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.672615 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-sbds4" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.687920 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-sbds4"] Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.746636 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqjwn\" (UniqueName: \"kubernetes.io/projected/97db72c2-132d-4b61-afb2-e65936b1352d-kube-api-access-rqjwn\") pod \"glance-db-create-sbds4\" (UID: \"97db72c2-132d-4b61-afb2-e65936b1352d\") " pod="openstack/glance-db-create-sbds4" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.848361 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqjwn\" (UniqueName: \"kubernetes.io/projected/97db72c2-132d-4b61-afb2-e65936b1352d-kube-api-access-rqjwn\") pod \"glance-db-create-sbds4\" (UID: \"97db72c2-132d-4b61-afb2-e65936b1352d\") " pod="openstack/glance-db-create-sbds4" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.868636 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqjwn\" (UniqueName: \"kubernetes.io/projected/97db72c2-132d-4b61-afb2-e65936b1352d-kube-api-access-rqjwn\") pod \"glance-db-create-sbds4\" (UID: \"97db72c2-132d-4b61-afb2-e65936b1352d\") " pod="openstack/glance-db-create-sbds4" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.983847 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 07 19:33:28 crc kubenswrapper[4813]: I1007 19:33:28.994451 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sbds4" Oct 07 19:33:29 crc kubenswrapper[4813]: I1007 19:33:29.559334 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0" Oct 07 19:33:29 crc kubenswrapper[4813]: E1007 19:33:29.559567 4813 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 07 19:33:29 crc kubenswrapper[4813]: E1007 19:33:29.559594 4813 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 07 19:33:29 crc kubenswrapper[4813]: E1007 19:33:29.559648 4813 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift podName:bd53a283-8633-435c-a910-ab9abccb5c0d nodeName:}" failed. No retries permitted until 2025-10-07 19:33:37.559630671 +0000 UTC m=+943.637886282 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift") pod "swift-storage-0" (UID: "bd53a283-8633-435c-a910-ab9abccb5c0d") : configmap "swift-ring-files" not found Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.384109 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-sbds4"] Oct 07 19:33:30 crc kubenswrapper[4813]: W1007 19:33:30.393299 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97db72c2_132d_4b61_afb2_e65936b1352d.slice/crio-bc05e4848cb8e5aff28346b49274cc8ea0528b10b3443d787da90d78e22e7e4d WatchSource:0}: Error finding container bc05e4848cb8e5aff28346b49274cc8ea0528b10b3443d787da90d78e22e7e4d: Status 404 returned error can't find the container with id bc05e4848cb8e5aff28346b49274cc8ea0528b10b3443d787da90d78e22e7e4d Oct 07 19:33:30 crc kubenswrapper[4813]: W1007 19:33:30.452608 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99efc153_28b9_4f27_86a3_b8a913bc66df.slice/crio-8c6090130d61f6848c8ec319401014f4e3c0b14a52113ead33f2b288fccaf360 WatchSource:0}: Error finding container 8c6090130d61f6848c8ec319401014f4e3c0b14a52113ead33f2b288fccaf360: Status 404 returned error can't find the container with id 8c6090130d61f6848c8ec319401014f4e3c0b14a52113ead33f2b288fccaf360 Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.458569 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-4g6mq"] Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.940288 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-bj76f" event={"ID":"8cee7433-9535-4db0-aa37-e8fc28bdbf94","Type":"ContainerStarted","Data":"4f6eb102c53ea64e89bac11337b8d75318f7d02a47711d1d8afd01598f991d21"} Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.944452 4813 generic.go:334] "Generic (PLEG): container finished" podID="97db72c2-132d-4b61-afb2-e65936b1352d" containerID="49ed646c9286297ac09320a65314843f97bcf1c7f93927c3b7ee8239062c5102" exitCode=0 Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.944558 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sbds4" event={"ID":"97db72c2-132d-4b61-afb2-e65936b1352d","Type":"ContainerDied","Data":"49ed646c9286297ac09320a65314843f97bcf1c7f93927c3b7ee8239062c5102"} Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.944671 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sbds4" event={"ID":"97db72c2-132d-4b61-afb2-e65936b1352d","Type":"ContainerStarted","Data":"bc05e4848cb8e5aff28346b49274cc8ea0528b10b3443d787da90d78e22e7e4d"} Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.946308 4813 generic.go:334] "Generic (PLEG): container finished" podID="99efc153-28b9-4f27-86a3-b8a913bc66df" containerID="aff9db31e0ffc2a87411c8853c73b0c6761072f763bc58434ca7b205064d4c9b" exitCode=0 Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.946366 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4g6mq" event={"ID":"99efc153-28b9-4f27-86a3-b8a913bc66df","Type":"ContainerDied","Data":"aff9db31e0ffc2a87411c8853c73b0c6761072f763bc58434ca7b205064d4c9b"} Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.946391 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-db-create-4g6mq" event={"ID":"99efc153-28b9-4f27-86a3-b8a913bc66df","Type":"ContainerStarted","Data":"8c6090130d61f6848c8ec319401014f4e3c0b14a52113ead33f2b288fccaf360"} Oct 07 19:33:30 crc kubenswrapper[4813]: I1007 19:33:30.969756 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-bj76f" podStartSLOduration=2.2182060359999998 podStartE2EDuration="5.969737985s" podCreationTimestamp="2025-10-07 19:33:25 +0000 UTC" firstStartedPulling="2025-10-07 19:33:26.219102655 +0000 UTC m=+932.297358266" lastFinishedPulling="2025-10-07 19:33:29.970634604 +0000 UTC m=+936.048890215" observedRunningTime="2025-10-07 19:33:30.962997098 +0000 UTC m=+937.041252709" watchObservedRunningTime="2025-10-07 19:33:30.969737985 +0000 UTC m=+937.047993596" Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.255209 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-qmzgx" Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.318985 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mhnfj"] Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.319212 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" podUID="9f1866f9-c802-4561-b716-040250f6dbc7" containerName="dnsmasq-dns" containerID="cri-o://33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324" gracePeriod=10 Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.802818 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.903624 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-dns-svc\") pod \"9f1866f9-c802-4561-b716-040250f6dbc7\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.904811 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgpr5\" (UniqueName: \"kubernetes.io/projected/9f1866f9-c802-4561-b716-040250f6dbc7-kube-api-access-xgpr5\") pod \"9f1866f9-c802-4561-b716-040250f6dbc7\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.904858 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-config\") pod \"9f1866f9-c802-4561-b716-040250f6dbc7\" (UID: \"9f1866f9-c802-4561-b716-040250f6dbc7\") " Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.910930 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f1866f9-c802-4561-b716-040250f6dbc7-kube-api-access-xgpr5" (OuterVolumeSpecName: "kube-api-access-xgpr5") pod "9f1866f9-c802-4561-b716-040250f6dbc7" (UID: "9f1866f9-c802-4561-b716-040250f6dbc7"). InnerVolumeSpecName "kube-api-access-xgpr5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.944237 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-config" (OuterVolumeSpecName: "config") pod "9f1866f9-c802-4561-b716-040250f6dbc7" (UID: "9f1866f9-c802-4561-b716-040250f6dbc7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.954046 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f1866f9-c802-4561-b716-040250f6dbc7" (UID: "9f1866f9-c802-4561-b716-040250f6dbc7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.957790 4813 generic.go:334] "Generic (PLEG): container finished" podID="9f1866f9-c802-4561-b716-040250f6dbc7" containerID="33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324" exitCode=0 Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.957891 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" event={"ID":"9f1866f9-c802-4561-b716-040250f6dbc7","Type":"ContainerDied","Data":"33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324"} Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.957957 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" event={"ID":"9f1866f9-c802-4561-b716-040250f6dbc7","Type":"ContainerDied","Data":"3020248de48783b0282b6931e94218ad01b41c5443403277600ab4b8301c9ac0"} Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.958021 4813 scope.go:117] "RemoveContainer" containerID="33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324" Oct 07 19:33:31 crc kubenswrapper[4813]: I1007 19:33:31.958249 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-mhnfj" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.006588 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.006625 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgpr5\" (UniqueName: \"kubernetes.io/projected/9f1866f9-c802-4561-b716-040250f6dbc7-kube-api-access-xgpr5\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.006639 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f1866f9-c802-4561-b716-040250f6dbc7-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.043284 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mhnfj"] Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.055680 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-mhnfj"] Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.060988 4813 scope.go:117] "RemoveContainer" containerID="b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.081870 4813 scope.go:117] "RemoveContainer" containerID="33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324" Oct 07 19:33:32 crc kubenswrapper[4813]: E1007 19:33:32.094867 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324\": container with ID starting with 33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324 not found: ID does not exist" containerID="33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.094914 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324"} err="failed to get container status \"33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324\": rpc error: code = NotFound desc = could not find container \"33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324\": container with ID starting with 33a0c73dcbfd23978699f3757422970b875c9b1ce03bb4a13b9878948e5f9324 not found: ID does not exist" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.094945 4813 scope.go:117] "RemoveContainer" containerID="b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3" Oct 07 19:33:32 crc kubenswrapper[4813]: E1007 19:33:32.095991 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3\": container with ID starting with b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3 not found: ID does not exist" containerID="b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.096033 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3"} err="failed to get container status 
\"b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3\": rpc error: code = NotFound desc = could not find container \"b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3\": container with ID starting with b0275d1a75551af466bb46b5283ce04fd3b514831aaac38da545b41812adf5a3 not found: ID does not exist" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.300096 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4g6mq" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.344018 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sbds4" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.413015 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqjwn\" (UniqueName: \"kubernetes.io/projected/97db72c2-132d-4b61-afb2-e65936b1352d-kube-api-access-rqjwn\") pod \"97db72c2-132d-4b61-afb2-e65936b1352d\" (UID: \"97db72c2-132d-4b61-afb2-e65936b1352d\") " Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.413227 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8hjr\" (UniqueName: \"kubernetes.io/projected/99efc153-28b9-4f27-86a3-b8a913bc66df-kube-api-access-l8hjr\") pod \"99efc153-28b9-4f27-86a3-b8a913bc66df\" (UID: \"99efc153-28b9-4f27-86a3-b8a913bc66df\") " Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.417962 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99efc153-28b9-4f27-86a3-b8a913bc66df-kube-api-access-l8hjr" (OuterVolumeSpecName: "kube-api-access-l8hjr") pod "99efc153-28b9-4f27-86a3-b8a913bc66df" (UID: "99efc153-28b9-4f27-86a3-b8a913bc66df"). InnerVolumeSpecName "kube-api-access-l8hjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.418083 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97db72c2-132d-4b61-afb2-e65936b1352d-kube-api-access-rqjwn" (OuterVolumeSpecName: "kube-api-access-rqjwn") pod "97db72c2-132d-4b61-afb2-e65936b1352d" (UID: "97db72c2-132d-4b61-afb2-e65936b1352d"). InnerVolumeSpecName "kube-api-access-rqjwn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.514837 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqjwn\" (UniqueName: \"kubernetes.io/projected/97db72c2-132d-4b61-afb2-e65936b1352d-kube-api-access-rqjwn\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.514865 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8hjr\" (UniqueName: \"kubernetes.io/projected/99efc153-28b9-4f27-86a3-b8a913bc66df-kube-api-access-l8hjr\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.613160 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f1866f9-c802-4561-b716-040250f6dbc7" path="/var/lib/kubelet/pods/9f1866f9-c802-4561-b716-040250f6dbc7/volumes" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.967877 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-4g6mq" event={"ID":"99efc153-28b9-4f27-86a3-b8a913bc66df","Type":"ContainerDied","Data":"8c6090130d61f6848c8ec319401014f4e3c0b14a52113ead33f2b288fccaf360"} Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.968222 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c6090130d61f6848c8ec319401014f4e3c0b14a52113ead33f2b288fccaf360" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.967899 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-4g6mq" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.973800 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sbds4" event={"ID":"97db72c2-132d-4b61-afb2-e65936b1352d","Type":"ContainerDied","Data":"bc05e4848cb8e5aff28346b49274cc8ea0528b10b3443d787da90d78e22e7e4d"} Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.973852 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc05e4848cb8e5aff28346b49274cc8ea0528b10b3443d787da90d78e22e7e4d" Oct 07 19:33:32 crc kubenswrapper[4813]: I1007 19:33:32.973866 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sbds4" Oct 07 19:33:36 crc kubenswrapper[4813]: I1007 19:33:36.232936 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.608255 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.626857 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bd53a283-8633-435c-a910-ab9abccb5c0d-etc-swift\") pod \"swift-storage-0\" (UID: \"bd53a283-8633-435c-a910-ab9abccb5c0d\") " pod="openstack/swift-storage-0" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.690076 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.982476 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-kgrtz"] Oct 07 19:33:37 crc kubenswrapper[4813]: E1007 19:33:37.983098 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97db72c2-132d-4b61-afb2-e65936b1352d" containerName="mariadb-database-create" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.983115 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="97db72c2-132d-4b61-afb2-e65936b1352d" containerName="mariadb-database-create" Oct 07 19:33:37 crc kubenswrapper[4813]: E1007 19:33:37.983141 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f1866f9-c802-4561-b716-040250f6dbc7" containerName="init" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.983147 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f1866f9-c802-4561-b716-040250f6dbc7" containerName="init" Oct 07 19:33:37 crc kubenswrapper[4813]: E1007 19:33:37.983157 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99efc153-28b9-4f27-86a3-b8a913bc66df" containerName="mariadb-database-create" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.983164 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="99efc153-28b9-4f27-86a3-b8a913bc66df" containerName="mariadb-database-create" Oct 07 19:33:37 crc kubenswrapper[4813]: E1007 19:33:37.983177 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f1866f9-c802-4561-b716-040250f6dbc7" containerName="dnsmasq-dns" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.983182 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f1866f9-c802-4561-b716-040250f6dbc7" containerName="dnsmasq-dns" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.983341 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="99efc153-28b9-4f27-86a3-b8a913bc66df" containerName="mariadb-database-create" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.983358 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="97db72c2-132d-4b61-afb2-e65936b1352d" containerName="mariadb-database-create" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.983366 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f1866f9-c802-4561-b716-040250f6dbc7" containerName="dnsmasq-dns" Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.983852 4813 util.go:30] "No sandbox for pod can be found. 
Oct 07 19:33:37 crc kubenswrapper[4813]: I1007 19:33:37.991797 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kgrtz"]
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.017760 4813 generic.go:334] "Generic (PLEG): container finished" podID="8cee7433-9535-4db0-aa37-e8fc28bdbf94" containerID="4f6eb102c53ea64e89bac11337b8d75318f7d02a47711d1d8afd01598f991d21" exitCode=0
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.017796 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-bj76f" event={"ID":"8cee7433-9535-4db0-aa37-e8fc28bdbf94","Type":"ContainerDied","Data":"4f6eb102c53ea64e89bac11337b8d75318f7d02a47711d1d8afd01598f991d21"}
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.117806 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp6t5\" (UniqueName: \"kubernetes.io/projected/c2a590f8-2fe3-48e8-b1fa-599605162117-kube-api-access-kp6t5\") pod \"keystone-db-create-kgrtz\" (UID: \"c2a590f8-2fe3-48e8-b1fa-599605162117\") " pod="openstack/keystone-db-create-kgrtz"
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.219893 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp6t5\" (UniqueName: \"kubernetes.io/projected/c2a590f8-2fe3-48e8-b1fa-599605162117-kube-api-access-kp6t5\") pod \"keystone-db-create-kgrtz\" (UID: \"c2a590f8-2fe3-48e8-b1fa-599605162117\") " pod="openstack/keystone-db-create-kgrtz"
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.243308 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.247884 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp6t5\" (UniqueName: \"kubernetes.io/projected/c2a590f8-2fe3-48e8-b1fa-599605162117-kube-api-access-kp6t5\") pod \"keystone-db-create-kgrtz\" (UID: \"c2a590f8-2fe3-48e8-b1fa-599605162117\") " pod="openstack/keystone-db-create-kgrtz"
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.306644 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kgrtz"
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.750088 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kgrtz"]
Oct 07 19:33:38 crc kubenswrapper[4813]: W1007 19:33:38.757505 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2a590f8_2fe3_48e8_b1fa_599605162117.slice/crio-87b50bb01ec4fe1b5ffb516d412c134c895da8422dcdc8aef47d695ac23b6d16 WatchSource:0}: Error finding container 87b50bb01ec4fe1b5ffb516d412c134c895da8422dcdc8aef47d695ac23b6d16: Status 404 returned error can't find the container with id 87b50bb01ec4fe1b5ffb516d412c134c895da8422dcdc8aef47d695ac23b6d16
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.823048 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-2f75-account-create-4bvsq"]
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.824125 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2f75-account-create-4bvsq"
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.826249 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.838253 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2f75-account-create-4bvsq"]
Oct 07 19:33:38 crc kubenswrapper[4813]: I1007 19:33:38.932830 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wffbw\" (UniqueName: \"kubernetes.io/projected/0b0553df-a455-41ec-938f-ef494964014d-kube-api-access-wffbw\") pod \"glance-2f75-account-create-4bvsq\" (UID: \"0b0553df-a455-41ec-938f-ef494964014d\") " pod="openstack/glance-2f75-account-create-4bvsq"
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.026575 4813 generic.go:334] "Generic (PLEG): container finished" podID="c2a590f8-2fe3-48e8-b1fa-599605162117" containerID="249d5e7b1d54c523dc81d83bc28d0c3bfb98d89a38be4cc8ba5ab6a99134d85f" exitCode=0
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.026641 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kgrtz" event={"ID":"c2a590f8-2fe3-48e8-b1fa-599605162117","Type":"ContainerDied","Data":"249d5e7b1d54c523dc81d83bc28d0c3bfb98d89a38be4cc8ba5ab6a99134d85f"}
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.026669 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kgrtz" event={"ID":"c2a590f8-2fe3-48e8-b1fa-599605162117","Type":"ContainerStarted","Data":"87b50bb01ec4fe1b5ffb516d412c134c895da8422dcdc8aef47d695ac23b6d16"}
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.027966 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"9f93170124522576afd29a0c6071200963cecf2bbb66f35e812e4ebd5b451f13"}
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.035002 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wffbw\" (UniqueName: \"kubernetes.io/projected/0b0553df-a455-41ec-938f-ef494964014d-kube-api-access-wffbw\") pod \"glance-2f75-account-create-4bvsq\" (UID: \"0b0553df-a455-41ec-938f-ef494964014d\") " pod="openstack/glance-2f75-account-create-4bvsq"
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.074292 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wffbw\" (UniqueName: \"kubernetes.io/projected/0b0553df-a455-41ec-938f-ef494964014d-kube-api-access-wffbw\") pod \"glance-2f75-account-create-4bvsq\" (UID: \"0b0553df-a455-41ec-938f-ef494964014d\") " pod="openstack/glance-2f75-account-create-4bvsq"
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.172297 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2f75-account-create-4bvsq"
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.383550 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-bj76f"
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.554062 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-dispersionconf\") pod \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") "
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.554115 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-ring-data-devices\") pod \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") "
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.554195 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8cee7433-9535-4db0-aa37-e8fc28bdbf94-etc-swift\") pod \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") "
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.554241 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-swiftconf\") pod \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") "
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.554263 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76xnq\" (UniqueName: \"kubernetes.io/projected/8cee7433-9535-4db0-aa37-e8fc28bdbf94-kube-api-access-76xnq\") pod \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") "
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.554402 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-combined-ca-bundle\") pod \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") "
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.554428 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-scripts\") pod \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\" (UID: \"8cee7433-9535-4db0-aa37-e8fc28bdbf94\") "
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.555419 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cee7433-9535-4db0-aa37-e8fc28bdbf94-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8cee7433-9535-4db0-aa37-e8fc28bdbf94" (UID: "8cee7433-9535-4db0-aa37-e8fc28bdbf94"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.557634 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "8cee7433-9535-4db0-aa37-e8fc28bdbf94" (UID: "8cee7433-9535-4db0-aa37-e8fc28bdbf94"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.559397 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cee7433-9535-4db0-aa37-e8fc28bdbf94-kube-api-access-76xnq" (OuterVolumeSpecName: "kube-api-access-76xnq") pod "8cee7433-9535-4db0-aa37-e8fc28bdbf94" (UID: "8cee7433-9535-4db0-aa37-e8fc28bdbf94"). InnerVolumeSpecName "kube-api-access-76xnq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.566683 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "8cee7433-9535-4db0-aa37-e8fc28bdbf94" (UID: "8cee7433-9535-4db0-aa37-e8fc28bdbf94"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.589524 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "8cee7433-9535-4db0-aa37-e8fc28bdbf94" (UID: "8cee7433-9535-4db0-aa37-e8fc28bdbf94"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.596579 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8cee7433-9535-4db0-aa37-e8fc28bdbf94" (UID: "8cee7433-9535-4db0-aa37-e8fc28bdbf94"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.597097 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-scripts" (OuterVolumeSpecName: "scripts") pod "8cee7433-9535-4db0-aa37-e8fc28bdbf94" (UID: "8cee7433-9535-4db0-aa37-e8fc28bdbf94"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.656983 4813 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-dispersionconf\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.657213 4813 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-ring-data-devices\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.657222 4813 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8cee7433-9535-4db0-aa37-e8fc28bdbf94-etc-swift\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.657232 4813 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-swiftconf\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.657241 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76xnq\" (UniqueName: \"kubernetes.io/projected/8cee7433-9535-4db0-aa37-e8fc28bdbf94-kube-api-access-76xnq\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.657251 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8cee7433-9535-4db0-aa37-e8fc28bdbf94-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.657259 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8cee7433-9535-4db0-aa37-e8fc28bdbf94-scripts\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:39 crc kubenswrapper[4813]: I1007 19:33:39.767863 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2f75-account-create-4bvsq"]
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.038112 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"bc32fca4d6a8b6c893ebb98f38ffd9e09834d442c693dbe54eea5ee0609fd584"}
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.038151 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"1b9ad844710170445736f52626b5e5b1a636e68e31fb4a38fbe747880ce53d1c"}
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.038162 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"0d7351e5956e15bcd4e2137d68663de022acab6c18dce40f8d04117e9ea47be0"}
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.041476 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-bj76f" event={"ID":"8cee7433-9535-4db0-aa37-e8fc28bdbf94","Type":"ContainerDied","Data":"704af3fa40c7a9da590127618d5b0af8a4a11b0419d10f6d7b5ac2d79692402d"}
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.041514 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="704af3fa40c7a9da590127618d5b0af8a4a11b0419d10f6d7b5ac2d79692402d"
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.041586 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-bj76f"
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.051874 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2f75-account-create-4bvsq" event={"ID":"0b0553df-a455-41ec-938f-ef494964014d","Type":"ContainerStarted","Data":"20f0a158a7b5377e8de387bfcd55424edf328ca16bbc7ce7ead0c523b3c703fc"}
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.051932 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2f75-account-create-4bvsq" event={"ID":"0b0553df-a455-41ec-938f-ef494964014d","Type":"ContainerStarted","Data":"1458bae05889af54a373f31b1d34c2a9dd7c34e88a6379bd28416c71325bfba5"}
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.094404 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-2f75-account-create-4bvsq" podStartSLOduration=2.09438752 podStartE2EDuration="2.09438752s" podCreationTimestamp="2025-10-07 19:33:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:33:40.074127487 +0000 UTC m=+946.152383108" watchObservedRunningTime="2025-10-07 19:33:40.09438752 +0000 UTC m=+946.172643131"
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.308032 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kgrtz"
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.479535 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kp6t5\" (UniqueName: \"kubernetes.io/projected/c2a590f8-2fe3-48e8-b1fa-599605162117-kube-api-access-kp6t5\") pod \"c2a590f8-2fe3-48e8-b1fa-599605162117\" (UID: \"c2a590f8-2fe3-48e8-b1fa-599605162117\") "
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.485426 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2a590f8-2fe3-48e8-b1fa-599605162117-kube-api-access-kp6t5" (OuterVolumeSpecName: "kube-api-access-kp6t5") pod "c2a590f8-2fe3-48e8-b1fa-599605162117" (UID: "c2a590f8-2fe3-48e8-b1fa-599605162117"). InnerVolumeSpecName "kube-api-access-kp6t5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:33:40 crc kubenswrapper[4813]: I1007 19:33:40.581657 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kp6t5\" (UniqueName: \"kubernetes.io/projected/c2a590f8-2fe3-48e8-b1fa-599605162117-kube-api-access-kp6t5\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:41 crc kubenswrapper[4813]: I1007 19:33:41.060643 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kgrtz"
Oct 07 19:33:41 crc kubenswrapper[4813]: I1007 19:33:41.060650 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kgrtz" event={"ID":"c2a590f8-2fe3-48e8-b1fa-599605162117","Type":"ContainerDied","Data":"87b50bb01ec4fe1b5ffb516d412c134c895da8422dcdc8aef47d695ac23b6d16"}
Oct 07 19:33:41 crc kubenswrapper[4813]: I1007 19:33:41.061547 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87b50bb01ec4fe1b5ffb516d412c134c895da8422dcdc8aef47d695ac23b6d16"
Oct 07 19:33:41 crc kubenswrapper[4813]: I1007 19:33:41.069444 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"c9311338f0eb61e22b8ca8c54068d161fa8538b8826c6eb46979e51fe8499551"}
Oct 07 19:33:41 crc kubenswrapper[4813]: I1007 19:33:41.072421 4813 generic.go:334] "Generic (PLEG): container finished" podID="0b0553df-a455-41ec-938f-ef494964014d" containerID="20f0a158a7b5377e8de387bfcd55424edf328ca16bbc7ce7ead0c523b3c703fc" exitCode=0
Oct 07 19:33:41 crc kubenswrapper[4813]: I1007 19:33:41.072455 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2f75-account-create-4bvsq" event={"ID":"0b0553df-a455-41ec-938f-ef494964014d","Type":"ContainerDied","Data":"20f0a158a7b5377e8de387bfcd55424edf328ca16bbc7ce7ead0c523b3c703fc"}
Oct 07 19:33:42 crc kubenswrapper[4813]: I1007 19:33:42.083001 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"7ef4a241e414ea3cfd2f471cefb7d370199b154ed7f22deb6bb36849a41b59e3"}
Oct 07 19:33:42 crc kubenswrapper[4813]: I1007 19:33:42.084717 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"d7b9daf59ddeadd1a0bae7ffe827ff0c86d0d86985c5a3ba242f8f6da1f0924e"}
Oct 07 19:33:42 crc kubenswrapper[4813]: I1007 19:33:42.084854 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"74e90964af0ddf00172e914a896f2d317e3b04fd538795a0fdbdaa309b1c940b"}
Oct 07 19:33:42 crc kubenswrapper[4813]: I1007 19:33:42.084982 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"97501e473b664a9a6ed333d9f03b0269698bbdfff7adf38de653ac65235812b2"}
Oct 07 19:33:42 crc kubenswrapper[4813]: I1007 19:33:42.377677 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2f75-account-create-4bvsq"
Oct 07 19:33:42 crc kubenswrapper[4813]: I1007 19:33:42.514875 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wffbw\" (UniqueName: \"kubernetes.io/projected/0b0553df-a455-41ec-938f-ef494964014d-kube-api-access-wffbw\") pod \"0b0553df-a455-41ec-938f-ef494964014d\" (UID: \"0b0553df-a455-41ec-938f-ef494964014d\") "
Oct 07 19:33:42 crc kubenswrapper[4813]: I1007 19:33:42.532775 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b0553df-a455-41ec-938f-ef494964014d-kube-api-access-wffbw" (OuterVolumeSpecName: "kube-api-access-wffbw") pod "0b0553df-a455-41ec-938f-ef494964014d" (UID: "0b0553df-a455-41ec-938f-ef494964014d"). InnerVolumeSpecName "kube-api-access-wffbw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:33:42 crc kubenswrapper[4813]: I1007 19:33:42.617259 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wffbw\" (UniqueName: \"kubernetes.io/projected/0b0553df-a455-41ec-938f-ef494964014d-kube-api-access-wffbw\") on node \"crc\" DevicePath \"\""
Oct 07 19:33:43 crc kubenswrapper[4813]: I1007 19:33:43.102253 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2f75-account-create-4bvsq"
Oct 07 19:33:43 crc kubenswrapper[4813]: I1007 19:33:43.102310 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2f75-account-create-4bvsq" event={"ID":"0b0553df-a455-41ec-938f-ef494964014d","Type":"ContainerDied","Data":"1458bae05889af54a373f31b1d34c2a9dd7c34e88a6379bd28416c71325bfba5"}
Oct 07 19:33:43 crc kubenswrapper[4813]: I1007 19:33:43.102411 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1458bae05889af54a373f31b1d34c2a9dd7c34e88a6379bd28416c71325bfba5"
Oct 07 19:33:43 crc kubenswrapper[4813]: I1007 19:33:43.111713 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"5a68bf9f629005b4257388a11717cbc11169935ebec9f4f840a4febf1a0faffe"}
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.080372 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-6x4qp"]
Oct 07 19:33:44 crc kubenswrapper[4813]: E1007 19:33:44.081017 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b0553df-a455-41ec-938f-ef494964014d" containerName="mariadb-account-create"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.081055 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b0553df-a455-41ec-938f-ef494964014d" containerName="mariadb-account-create"
Oct 07 19:33:44 crc kubenswrapper[4813]: E1007 19:33:44.081085 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2a590f8-2fe3-48e8-b1fa-599605162117" containerName="mariadb-database-create"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.081094 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2a590f8-2fe3-48e8-b1fa-599605162117" containerName="mariadb-database-create"
Oct 07 19:33:44 crc kubenswrapper[4813]: E1007 19:33:44.081117 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cee7433-9535-4db0-aa37-e8fc28bdbf94" containerName="swift-ring-rebalance"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.081127 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cee7433-9535-4db0-aa37-e8fc28bdbf94" containerName="swift-ring-rebalance"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.081353 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cee7433-9535-4db0-aa37-e8fc28bdbf94" containerName="swift-ring-rebalance"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.081378 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2a590f8-2fe3-48e8-b1fa-599605162117" containerName="mariadb-database-create"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.081420 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b0553df-a455-41ec-938f-ef494964014d" containerName="mariadb-account-create"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.082044 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.087017 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.087031 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mjwqx"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.095278 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-6x4qp"]
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.128102 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"75ec725a1bc27b18c4612a0a4729eab330e7b9a8185f19221b87721a7cb54027"}
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.128148 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"fa9f205c9973247c6eac3c00136bd57a246531fb3d570fb4470b5b1bc4bb8469"}
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.128163 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"024439b1e3b254cda211fec4526598246921de19e69468d34d8f654dbf7a8c72"}
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.128175 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"3a28ed45f8ae33aca8fe3e5f38dfc4d991f2d9849284036f9ed4834ef4fbd44c"}
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.242452 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-config-data\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.242491 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-combined-ca-bundle\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.242591 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-db-sync-config-data\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.242752 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk6sd\" (UniqueName: \"kubernetes.io/projected/9d72dd22-ed08-4510-9c62-d01807e11064-kube-api-access-fk6sd\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.344391 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk6sd\" (UniqueName: \"kubernetes.io/projected/9d72dd22-ed08-4510-9c62-d01807e11064-kube-api-access-fk6sd\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.344482 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-config-data\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.344505 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-combined-ca-bundle\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.344553 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-db-sync-config-data\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.349431 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-db-sync-config-data\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.349468 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-config-data\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.350066 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-combined-ca-bundle\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.379088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk6sd\" (UniqueName: \"kubernetes.io/projected/9d72dd22-ed08-4510-9c62-d01807e11064-kube-api-access-fk6sd\") pod \"glance-db-sync-6x4qp\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.407398 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6x4qp"
Oct 07 19:33:44 crc kubenswrapper[4813]: I1007 19:33:44.742875 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-6x4qp"]
Oct 07 19:33:44 crc kubenswrapper[4813]: W1007 19:33:44.745545 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d72dd22_ed08_4510_9c62_d01807e11064.slice/crio-10753472e2c37a808d3008f7975a8eb270ba0cea9e38746ba0538616a9d6a914 WatchSource:0}: Error finding container 10753472e2c37a808d3008f7975a8eb270ba0cea9e38746ba0538616a9d6a914: Status 404 returned error can't find the container with id 10753472e2c37a808d3008f7975a8eb270ba0cea9e38746ba0538616a9d6a914
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.143506 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6x4qp" event={"ID":"9d72dd22-ed08-4510-9c62-d01807e11064","Type":"ContainerStarted","Data":"10753472e2c37a808d3008f7975a8eb270ba0cea9e38746ba0538616a9d6a914"}
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.146012 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jd55f" podUID="a162a130-6094-42c0-a3d1-489de4a7fac4" containerName="ovn-controller" probeResult="failure" output=<
Oct 07 19:33:45 crc kubenswrapper[4813]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Oct 07 19:33:45 crc kubenswrapper[4813]: >
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.153302 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"9149ac39850d3e3ea196698c6573a3aee38b636663ff3212a67687f466d87b6e"}
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.163997 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-dmq4j"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.169161 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-dmq4j"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.535441 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jd55f-config-8sj75"]
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.536603 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.539590 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.560653 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jd55f-config-8sj75"]
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.668258 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-additional-scripts\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.668362 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-scripts\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.668391 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfjb6\" (UniqueName: \"kubernetes.io/projected/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-kube-api-access-tfjb6\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.668572 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.668701 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-log-ovn\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.668747 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run-ovn\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.771100 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.771173 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-log-ovn\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.771194 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run-ovn\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.771229 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-additional-scripts\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.771300 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-scripts\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.771316 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfjb6\" (UniqueName: \"kubernetes.io/projected/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-kube-api-access-tfjb6\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.771475 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-log-ovn\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.771799 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run-ovn\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.772115 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-additional-scripts\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.772173 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.774031 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-scripts\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.795111 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfjb6\" (UniqueName: \"kubernetes.io/projected/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-kube-api-access-tfjb6\") pod \"ovn-controller-jd55f-config-8sj75\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:45 crc kubenswrapper[4813]: I1007 19:33:45.863591 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f-config-8sj75"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.180249 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bd53a283-8633-435c-a910-ab9abccb5c0d","Type":"ContainerStarted","Data":"a274fc64fe6dc2bd5ccfed051b72323c310a06817fdd94d37827a679c4107eb7"}
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.210906 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=21.656447763 podStartE2EDuration="26.210888736s" podCreationTimestamp="2025-10-07 19:33:20 +0000 UTC" firstStartedPulling="2025-10-07 19:33:38.249137682 +0000 UTC m=+944.327393323" lastFinishedPulling="2025-10-07 19:33:42.803578685 +0000 UTC m=+948.881834296" observedRunningTime="2025-10-07 19:33:46.208389327 +0000 UTC m=+952.286644948" watchObservedRunningTime="2025-10-07 19:33:46.210888736 +0000 UTC m=+952.289144357"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.326712 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jd55f-config-8sj75"]
Oct 07 19:33:46 crc kubenswrapper[4813]: W1007 19:33:46.337650 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bed85fd_c03c_4733_a6e3_bbc61f25f09c.slice/crio-e7af2017c6ad751af29119cc7c7a8ea035bf28c8ce7b2714fc4ccae7bfe84b7e WatchSource:0}: Error finding container e7af2017c6ad751af29119cc7c7a8ea035bf28c8ce7b2714fc4ccae7bfe84b7e: Status 404 returned error can't find the container with id e7af2017c6ad751af29119cc7c7a8ea035bf28c8ce7b2714fc4ccae7bfe84b7e
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.467595 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-kwxf9"]
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.469779 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.472698 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.521014 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-kwxf9"]
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.587241 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.587420 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.587554 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-config\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.587682 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.587742 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm54m\" (UniqueName: \"kubernetes.io/projected/b33dcc21-d1fc-4056-8f3b-49acddf0650f-kube-api-access-gm54m\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.587809 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.689637 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-config\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.689708 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.689749 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm54m\" (UniqueName: \"kubernetes.io/projected/b33dcc21-d1fc-4056-8f3b-49acddf0650f-kube-api-access-gm54m\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.689780 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.689877 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.689910 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.690738 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-config\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.691000 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.691148 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.691752 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.692385 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9"
Oct 07
19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.718734 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gm54m\" (UniqueName: \"kubernetes.io/projected/b33dcc21-d1fc-4056-8f3b-49acddf0650f-kube-api-access-gm54m\") pod \"dnsmasq-dns-77585f5f8c-kwxf9\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" Oct 07 19:33:46 crc kubenswrapper[4813]: I1007 19:33:46.791970 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" Oct 07 19:33:47 crc kubenswrapper[4813]: I1007 19:33:47.188484 4813 generic.go:334] "Generic (PLEG): container finished" podID="5bed85fd-c03c-4733-a6e3-bbc61f25f09c" containerID="23a40ebcb20e53085f5983ca43357fa0f9d66710836a6e3c2f505e3bebc4f0f3" exitCode=0 Oct 07 19:33:47 crc kubenswrapper[4813]: I1007 19:33:47.188668 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jd55f-config-8sj75" event={"ID":"5bed85fd-c03c-4733-a6e3-bbc61f25f09c","Type":"ContainerDied","Data":"23a40ebcb20e53085f5983ca43357fa0f9d66710836a6e3c2f505e3bebc4f0f3"} Oct 07 19:33:47 crc kubenswrapper[4813]: I1007 19:33:47.188976 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jd55f-config-8sj75" event={"ID":"5bed85fd-c03c-4733-a6e3-bbc61f25f09c","Type":"ContainerStarted","Data":"e7af2017c6ad751af29119cc7c7a8ea035bf28c8ce7b2714fc4ccae7bfe84b7e"} Oct 07 19:33:47 crc kubenswrapper[4813]: I1007 19:33:47.235317 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-kwxf9"] Oct 07 19:33:47 crc kubenswrapper[4813]: W1007 19:33:47.240186 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb33dcc21_d1fc_4056_8f3b_49acddf0650f.slice/crio-0fe4618d2a470cc8a04c5e9ba5cb92a6199c483ce0ffdee4fbad527c24815816 WatchSource:0}: Error finding container 0fe4618d2a470cc8a04c5e9ba5cb92a6199c483ce0ffdee4fbad527c24815816: Status 404 returned error can't find the container with id 0fe4618d2a470cc8a04c5e9ba5cb92a6199c483ce0ffdee4fbad527c24815816 Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.131850 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-ab07-account-create-llmcr"] Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.133337 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-ab07-account-create-llmcr" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.135436 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.138940 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-ab07-account-create-llmcr"] Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.198268 4813 generic.go:334] "Generic (PLEG): container finished" podID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" containerID="936e5dd986f371614184c8ef5fcc173130090ec90defe42299fb8b1e66fa7287" exitCode=0 Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.198571 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" event={"ID":"b33dcc21-d1fc-4056-8f3b-49acddf0650f","Type":"ContainerDied","Data":"936e5dd986f371614184c8ef5fcc173130090ec90defe42299fb8b1e66fa7287"} Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.198628 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" event={"ID":"b33dcc21-d1fc-4056-8f3b-49acddf0650f","Type":"ContainerStarted","Data":"0fe4618d2a470cc8a04c5e9ba5cb92a6199c483ce0ffdee4fbad527c24815816"} Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.222257 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mktm\" (UniqueName: \"kubernetes.io/projected/af27a525-e196-45c9-bf20-7bce89dfae1f-kube-api-access-4mktm\") pod \"keystone-ab07-account-create-llmcr\" (UID: \"af27a525-e196-45c9-bf20-7bce89dfae1f\") " pod="openstack/keystone-ab07-account-create-llmcr" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.326075 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mktm\" (UniqueName: \"kubernetes.io/projected/af27a525-e196-45c9-bf20-7bce89dfae1f-kube-api-access-4mktm\") pod \"keystone-ab07-account-create-llmcr\" (UID: \"af27a525-e196-45c9-bf20-7bce89dfae1f\") " pod="openstack/keystone-ab07-account-create-llmcr" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.344827 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mktm\" (UniqueName: \"kubernetes.io/projected/af27a525-e196-45c9-bf20-7bce89dfae1f-kube-api-access-4mktm\") pod \"keystone-ab07-account-create-llmcr\" (UID: \"af27a525-e196-45c9-bf20-7bce89dfae1f\") " pod="openstack/keystone-ab07-account-create-llmcr" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.425203 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5cae-account-create-qzhks"] Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.427391 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5cae-account-create-qzhks" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.429288 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.435834 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5cae-account-create-qzhks"] Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.458763 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-ab07-account-create-llmcr" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.529221 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fk99\" (UniqueName: \"kubernetes.io/projected/efc337fb-89a0-4837-9137-ea0d3b8f51eb-kube-api-access-7fk99\") pod \"placement-5cae-account-create-qzhks\" (UID: \"efc337fb-89a0-4837-9137-ea0d3b8f51eb\") " pod="openstack/placement-5cae-account-create-qzhks" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.627831 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f-config-8sj75" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.633948 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fk99\" (UniqueName: \"kubernetes.io/projected/efc337fb-89a0-4837-9137-ea0d3b8f51eb-kube-api-access-7fk99\") pod \"placement-5cae-account-create-qzhks\" (UID: \"efc337fb-89a0-4837-9137-ea0d3b8f51eb\") " pod="openstack/placement-5cae-account-create-qzhks" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.658796 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fk99\" (UniqueName: \"kubernetes.io/projected/efc337fb-89a0-4837-9137-ea0d3b8f51eb-kube-api-access-7fk99\") pod \"placement-5cae-account-create-qzhks\" (UID: \"efc337fb-89a0-4837-9137-ea0d3b8f51eb\") " pod="openstack/placement-5cae-account-create-qzhks" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.737051 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-scripts\") pod \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.737138 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfjb6\" (UniqueName: \"kubernetes.io/projected/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-kube-api-access-tfjb6\") pod \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.737171 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run-ovn\") pod \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.737240 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-additional-scripts\") pod \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.737280 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run\") pod \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.737359 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-log-ovn\") pod 
\"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\" (UID: \"5bed85fd-c03c-4733-a6e3-bbc61f25f09c\") " Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.737722 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "5bed85fd-c03c-4733-a6e3-bbc61f25f09c" (UID: "5bed85fd-c03c-4733-a6e3-bbc61f25f09c"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.738447 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "5bed85fd-c03c-4733-a6e3-bbc61f25f09c" (UID: "5bed85fd-c03c-4733-a6e3-bbc61f25f09c"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.738492 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run" (OuterVolumeSpecName: "var-run") pod "5bed85fd-c03c-4733-a6e3-bbc61f25f09c" (UID: "5bed85fd-c03c-4733-a6e3-bbc61f25f09c"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.738544 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "5bed85fd-c03c-4733-a6e3-bbc61f25f09c" (UID: "5bed85fd-c03c-4733-a6e3-bbc61f25f09c"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.738676 4813 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.738693 4813 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.738701 4813 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.738712 4813 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-var-run\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.738702 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-scripts" (OuterVolumeSpecName: "scripts") pod "5bed85fd-c03c-4733-a6e3-bbc61f25f09c" (UID: "5bed85fd-c03c-4733-a6e3-bbc61f25f09c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.741700 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-kube-api-access-tfjb6" (OuterVolumeSpecName: "kube-api-access-tfjb6") pod "5bed85fd-c03c-4733-a6e3-bbc61f25f09c" (UID: "5bed85fd-c03c-4733-a6e3-bbc61f25f09c"). InnerVolumeSpecName "kube-api-access-tfjb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.766840 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5cae-account-create-qzhks" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.844548 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.844569 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfjb6\" (UniqueName: \"kubernetes.io/projected/5bed85fd-c03c-4733-a6e3-bbc61f25f09c-kube-api-access-tfjb6\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:48 crc kubenswrapper[4813]: I1007 19:33:48.937976 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-ab07-account-create-llmcr"] Oct 07 19:33:48 crc kubenswrapper[4813]: W1007 19:33:48.951226 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf27a525_e196_45c9_bf20_7bce89dfae1f.slice/crio-717dbd0b934d59dd8671bbbb5bf8da0fdc981171f49435ecefe1deb7901e1dcb WatchSource:0}: Error finding container 717dbd0b934d59dd8671bbbb5bf8da0fdc981171f49435ecefe1deb7901e1dcb: Status 404 returned error can't find the container with id 717dbd0b934d59dd8671bbbb5bf8da0fdc981171f49435ecefe1deb7901e1dcb Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.200626 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5cae-account-create-qzhks"] Oct 07 19:33:49 crc kubenswrapper[4813]: W1007 19:33:49.206557 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podefc337fb_89a0_4837_9137_ea0d3b8f51eb.slice/crio-0c304989d855d843b6dac691e157cbed121fe59ccfe3ca5a3f7edc2e3c6e5bca WatchSource:0}: Error finding container 0c304989d855d843b6dac691e157cbed121fe59ccfe3ca5a3f7edc2e3c6e5bca: Status 404 returned error can't find the container with id 0c304989d855d843b6dac691e157cbed121fe59ccfe3ca5a3f7edc2e3c6e5bca Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.208363 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" event={"ID":"b33dcc21-d1fc-4056-8f3b-49acddf0650f","Type":"ContainerStarted","Data":"e2e164c13fcc7c95481be37c3139b57bd833a269c3902fe65b00004f0b7f4671"} Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.208492 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.216681 4813 generic.go:334] "Generic (PLEG): container finished" podID="af27a525-e196-45c9-bf20-7bce89dfae1f" containerID="6d784c2d4053e44e8fbd3d6387c9158261367aed26d93538a11d46a80e648de7" exitCode=0 Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.216781 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-ab07-account-create-llmcr" event={"ID":"af27a525-e196-45c9-bf20-7bce89dfae1f","Type":"ContainerDied","Data":"6d784c2d4053e44e8fbd3d6387c9158261367aed26d93538a11d46a80e648de7"} Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.216824 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-ab07-account-create-llmcr" event={"ID":"af27a525-e196-45c9-bf20-7bce89dfae1f","Type":"ContainerStarted","Data":"717dbd0b934d59dd8671bbbb5bf8da0fdc981171f49435ecefe1deb7901e1dcb"} Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.218870 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jd55f-config-8sj75" event={"ID":"5bed85fd-c03c-4733-a6e3-bbc61f25f09c","Type":"ContainerDied","Data":"e7af2017c6ad751af29119cc7c7a8ea035bf28c8ce7b2714fc4ccae7bfe84b7e"} Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.218897 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7af2017c6ad751af29119cc7c7a8ea035bf28c8ce7b2714fc4ccae7bfe84b7e" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.218947 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f-config-8sj75" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.237212 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" podStartSLOduration=3.237193617 podStartE2EDuration="3.237193617s" podCreationTimestamp="2025-10-07 19:33:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:33:49.232100726 +0000 UTC m=+955.310356337" watchObservedRunningTime="2025-10-07 19:33:49.237193617 +0000 UTC m=+955.315449228" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.722516 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jd55f-config-8sj75"] Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.730556 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jd55f-config-8sj75"] Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.828884 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jd55f-config-c4cgc"] Oct 07 19:33:49 crc kubenswrapper[4813]: E1007 19:33:49.829357 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bed85fd-c03c-4733-a6e3-bbc61f25f09c" containerName="ovn-config" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.829380 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bed85fd-c03c-4733-a6e3-bbc61f25f09c" containerName="ovn-config" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.829596 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bed85fd-c03c-4733-a6e3-bbc61f25f09c" containerName="ovn-config" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.830269 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.835160 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.843872 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jd55f-config-c4cgc"] Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.960436 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ln52\" (UniqueName: \"kubernetes.io/projected/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-kube-api-access-2ln52\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.960502 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.960531 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-log-ovn\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.960557 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-additional-scripts\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.960591 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run-ovn\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:49 crc kubenswrapper[4813]: I1007 19:33:49.960632 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-scripts\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062398 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062439 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-log-ovn\") pod 
\"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062468 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-additional-scripts\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062501 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run-ovn\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062543 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-scripts\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062586 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ln52\" (UniqueName: \"kubernetes.io/projected/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-kube-api-access-2ln52\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062733 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-log-ovn\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062733 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.062781 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run-ovn\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.063575 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-additional-scripts\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.064941 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-scripts\") pod 
\"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.091115 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ln52\" (UniqueName: \"kubernetes.io/projected/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-kube-api-access-2ln52\") pod \"ovn-controller-jd55f-config-c4cgc\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.143931 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-jd55f" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.148073 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.262369 4813 generic.go:334] "Generic (PLEG): container finished" podID="efc337fb-89a0-4837-9137-ea0d3b8f51eb" containerID="0d523f0ed776f2b58b3cb228e461ae3998a527f908c88af624ff44b83e336abd" exitCode=0 Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.262451 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cae-account-create-qzhks" event={"ID":"efc337fb-89a0-4837-9137-ea0d3b8f51eb","Type":"ContainerDied","Data":"0d523f0ed776f2b58b3cb228e461ae3998a527f908c88af624ff44b83e336abd"} Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.262637 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cae-account-create-qzhks" event={"ID":"efc337fb-89a0-4837-9137-ea0d3b8f51eb","Type":"ContainerStarted","Data":"0c304989d855d843b6dac691e157cbed121fe59ccfe3ca5a3f7edc2e3c6e5bca"} Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.584658 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jd55f-config-c4cgc"] Oct 07 19:33:50 crc kubenswrapper[4813]: W1007 19:33:50.600630 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad9518e8_22cf_4c39_b8c6_15752d4a67c6.slice/crio-c67bba5ea0127443ea19a959e2e35e350579487f306a756642dae764925a4559 WatchSource:0}: Error finding container c67bba5ea0127443ea19a959e2e35e350579487f306a756642dae764925a4559: Status 404 returned error can't find the container with id c67bba5ea0127443ea19a959e2e35e350579487f306a756642dae764925a4559 Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.617545 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bed85fd-c03c-4733-a6e3-bbc61f25f09c" path="/var/lib/kubelet/pods/5bed85fd-c03c-4733-a6e3-bbc61f25f09c/volumes" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.779220 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-ab07-account-create-llmcr" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.876705 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mktm\" (UniqueName: \"kubernetes.io/projected/af27a525-e196-45c9-bf20-7bce89dfae1f-kube-api-access-4mktm\") pod \"af27a525-e196-45c9-bf20-7bce89dfae1f\" (UID: \"af27a525-e196-45c9-bf20-7bce89dfae1f\") " Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.882185 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af27a525-e196-45c9-bf20-7bce89dfae1f-kube-api-access-4mktm" (OuterVolumeSpecName: "kube-api-access-4mktm") pod "af27a525-e196-45c9-bf20-7bce89dfae1f" (UID: "af27a525-e196-45c9-bf20-7bce89dfae1f"). InnerVolumeSpecName "kube-api-access-4mktm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:50 crc kubenswrapper[4813]: I1007 19:33:50.979071 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mktm\" (UniqueName: \"kubernetes.io/projected/af27a525-e196-45c9-bf20-7bce89dfae1f-kube-api-access-4mktm\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.271857 4813 generic.go:334] "Generic (PLEG): container finished" podID="ad9518e8-22cf-4c39-b8c6-15752d4a67c6" containerID="b3ddda03f9d6d81d98bac55b702ac2918b76da0ab10ee0ff2e7fdca09cca212e" exitCode=0 Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.271957 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jd55f-config-c4cgc" event={"ID":"ad9518e8-22cf-4c39-b8c6-15752d4a67c6","Type":"ContainerDied","Data":"b3ddda03f9d6d81d98bac55b702ac2918b76da0ab10ee0ff2e7fdca09cca212e"} Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.272298 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jd55f-config-c4cgc" event={"ID":"ad9518e8-22cf-4c39-b8c6-15752d4a67c6","Type":"ContainerStarted","Data":"c67bba5ea0127443ea19a959e2e35e350579487f306a756642dae764925a4559"} Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.274879 4813 generic.go:334] "Generic (PLEG): container finished" podID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerID="50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646" exitCode=0 Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.274956 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49","Type":"ContainerDied","Data":"50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646"} Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.278087 4813 generic.go:334] "Generic (PLEG): container finished" podID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerID="98789f437cadf7459a98615391b7fd39c5b25d988dcb21b731b947d853a5d811" exitCode=0 Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.278150 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b693f559-87e2-41ef-94c0-56d76bd9ef00","Type":"ContainerDied","Data":"98789f437cadf7459a98615391b7fd39c5b25d988dcb21b731b947d853a5d811"} Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.291038 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-ab07-account-create-llmcr" Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.291031 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-ab07-account-create-llmcr" event={"ID":"af27a525-e196-45c9-bf20-7bce89dfae1f","Type":"ContainerDied","Data":"717dbd0b934d59dd8671bbbb5bf8da0fdc981171f49435ecefe1deb7901e1dcb"} Oct 07 19:33:51 crc kubenswrapper[4813]: I1007 19:33:51.291102 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="717dbd0b934d59dd8671bbbb5bf8da0fdc981171f49435ecefe1deb7901e1dcb" Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.078940 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.079239 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.079284 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.079929 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c4e3a874402bcde4b4b4d8190142ef2959a5d27f6fb1ca4f9803d48de7b2c187"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.079984 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://c4e3a874402bcde4b4b4d8190142ef2959a5d27f6fb1ca4f9803d48de7b2c187" gracePeriod=600 Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.301888 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="c4e3a874402bcde4b4b4d8190142ef2959a5d27f6fb1ca4f9803d48de7b2c187" exitCode=0 Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.301932 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"c4e3a874402bcde4b4b4d8190142ef2959a5d27f6fb1ca4f9803d48de7b2c187"} Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.301989 4813 scope.go:117] "RemoveContainer" containerID="65fe0b5a9444ed388154693078866b82b9f87cf7cbddae0e9656f26066276d1a" Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.306619 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49","Type":"ContainerStarted","Data":"5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825"} Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.306892 4813 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:33:52 crc kubenswrapper[4813]: I1007 19:33:52.341922 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.101716008 podStartE2EDuration="1m8.341897565s" podCreationTimestamp="2025-10-07 19:32:44 +0000 UTC" firstStartedPulling="2025-10-07 19:32:46.24020419 +0000 UTC m=+892.318459801" lastFinishedPulling="2025-10-07 19:33:17.480385707 +0000 UTC m=+923.558641358" observedRunningTime="2025-10-07 19:33:52.334928732 +0000 UTC m=+958.413184383" watchObservedRunningTime="2025-10-07 19:33:52.341897565 +0000 UTC m=+958.420153176" Oct 07 19:33:56 crc kubenswrapper[4813]: I1007 19:33:56.793271 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" Oct 07 19:33:56 crc kubenswrapper[4813]: I1007 19:33:56.860816 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qmzgx"] Oct 07 19:33:56 crc kubenswrapper[4813]: I1007 19:33:56.861089 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-qmzgx" podUID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" containerName="dnsmasq-dns" containerID="cri-o://751ab3ea2587e47b4b3d8686529b1ee934773e5a56066fc6654e0dc9a240b617" gracePeriod=10 Oct 07 19:33:57 crc kubenswrapper[4813]: I1007 19:33:57.358595 4813 generic.go:334] "Generic (PLEG): container finished" podID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" containerID="751ab3ea2587e47b4b3d8686529b1ee934773e5a56066fc6654e0dc9a240b617" exitCode=0 Oct 07 19:33:57 crc kubenswrapper[4813]: I1007 19:33:57.358655 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qmzgx" event={"ID":"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef","Type":"ContainerDied","Data":"751ab3ea2587e47b4b3d8686529b1ee934773e5a56066fc6654e0dc9a240b617"} Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.209179 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.322709 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5cae-account-create-qzhks" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.339240 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-additional-scripts\") pod \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.339333 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run\") pod \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.339442 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-log-ovn\") pod \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.339463 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run-ovn\") pod \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.339537 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-scripts\") pod \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.339563 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ln52\" (UniqueName: \"kubernetes.io/projected/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-kube-api-access-2ln52\") pod \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\" (UID: \"ad9518e8-22cf-4c39-b8c6-15752d4a67c6\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.340101 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ad9518e8-22cf-4c39-b8c6-15752d4a67c6" (UID: "ad9518e8-22cf-4c39-b8c6-15752d4a67c6"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.340202 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ad9518e8-22cf-4c39-b8c6-15752d4a67c6" (UID: "ad9518e8-22cf-4c39-b8c6-15752d4a67c6"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.345014 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ad9518e8-22cf-4c39-b8c6-15752d4a67c6" (UID: "ad9518e8-22cf-4c39-b8c6-15752d4a67c6"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.346049 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-scripts" (OuterVolumeSpecName: "scripts") pod "ad9518e8-22cf-4c39-b8c6-15752d4a67c6" (UID: "ad9518e8-22cf-4c39-b8c6-15752d4a67c6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.346747 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-kube-api-access-2ln52" (OuterVolumeSpecName: "kube-api-access-2ln52") pod "ad9518e8-22cf-4c39-b8c6-15752d4a67c6" (UID: "ad9518e8-22cf-4c39-b8c6-15752d4a67c6"). InnerVolumeSpecName "kube-api-access-2ln52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.340155 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run" (OuterVolumeSpecName: "var-run") pod "ad9518e8-22cf-4c39-b8c6-15752d4a67c6" (UID: "ad9518e8-22cf-4c39-b8c6-15752d4a67c6"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.384553 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5cae-account-create-qzhks" event={"ID":"efc337fb-89a0-4837-9137-ea0d3b8f51eb","Type":"ContainerDied","Data":"0c304989d855d843b6dac691e157cbed121fe59ccfe3ca5a3f7edc2e3c6e5bca"} Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.384591 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c304989d855d843b6dac691e157cbed121fe59ccfe3ca5a3f7edc2e3c6e5bca" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.384656 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5cae-account-create-qzhks" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.388656 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jd55f-config-c4cgc" event={"ID":"ad9518e8-22cf-4c39-b8c6-15752d4a67c6","Type":"ContainerDied","Data":"c67bba5ea0127443ea19a959e2e35e350579487f306a756642dae764925a4559"} Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.388707 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c67bba5ea0127443ea19a959e2e35e350579487f306a756642dae764925a4559" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.388673 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jd55f-config-c4cgc" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.410144 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-qmzgx" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.441215 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fk99\" (UniqueName: \"kubernetes.io/projected/efc337fb-89a0-4837-9137-ea0d3b8f51eb-kube-api-access-7fk99\") pod \"efc337fb-89a0-4837-9137-ea0d3b8f51eb\" (UID: \"efc337fb-89a0-4837-9137-ea0d3b8f51eb\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.441647 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.441665 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ln52\" (UniqueName: \"kubernetes.io/projected/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-kube-api-access-2ln52\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.441678 4813 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.441690 4813 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.441698 4813 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.441718 4813 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad9518e8-22cf-4c39-b8c6-15752d4a67c6-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.446578 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efc337fb-89a0-4837-9137-ea0d3b8f51eb-kube-api-access-7fk99" (OuterVolumeSpecName: "kube-api-access-7fk99") pod "efc337fb-89a0-4837-9137-ea0d3b8f51eb" (UID: "efc337fb-89a0-4837-9137-ea0d3b8f51eb"). InnerVolumeSpecName "kube-api-access-7fk99". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.542953 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-nb\") pod \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.543245 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfv69\" (UniqueName: \"kubernetes.io/projected/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-kube-api-access-qfv69\") pod \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.543288 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-config\") pod \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.543398 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-dns-svc\") pod \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.543451 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-sb\") pod \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\" (UID: \"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef\") " Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.543723 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fk99\" (UniqueName: \"kubernetes.io/projected/efc337fb-89a0-4837-9137-ea0d3b8f51eb-kube-api-access-7fk99\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.548405 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-kube-api-access-qfv69" (OuterVolumeSpecName: "kube-api-access-qfv69") pod "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" (UID: "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef"). InnerVolumeSpecName "kube-api-access-qfv69". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.593139 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" (UID: "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.595895 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-config" (OuterVolumeSpecName: "config") pod "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" (UID: "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.601289 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" (UID: "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.602521 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" (UID: "da6998c9-1d0e-45b6-9cbe-e65c2bb260ef"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.645474 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.645501 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.645509 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.645519 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfv69\" (UniqueName: \"kubernetes.io/projected/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-kube-api-access-qfv69\") on node \"crc\" DevicePath \"\"" Oct 07 19:33:59 crc kubenswrapper[4813]: I1007 19:33:59.645666 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.292680 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jd55f-config-c4cgc"] Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.299894 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jd55f-config-c4cgc"] Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.397081 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"0f4da7fd23d52ded39cf69b0faa3801bac77bdff2643678a6b8540c579041a59"} Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.399338 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6x4qp" event={"ID":"9d72dd22-ed08-4510-9c62-d01807e11064","Type":"ContainerStarted","Data":"954463ed877a1da5bde94d44c8aa23324d076ea764cb58ad0114403ad460ad7d"} Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.401885 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b693f559-87e2-41ef-94c0-56d76bd9ef00","Type":"ContainerStarted","Data":"3f356796aac90f5d750514cac672a095f975cec17e70141ca0af6d3bcaa55118"} Oct 07 19:34:00 crc 
kubenswrapper[4813]: I1007 19:34:00.402147 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.404446 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-qmzgx" event={"ID":"da6998c9-1d0e-45b6-9cbe-e65c2bb260ef","Type":"ContainerDied","Data":"4dc48c9ad019d14a75a10529eb2c56289c3d223b70694f30a28c2a37b21e0512"} Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.404560 4813 scope.go:117] "RemoveContainer" containerID="751ab3ea2587e47b4b3d8686529b1ee934773e5a56066fc6654e0dc9a240b617" Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.404498 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-qmzgx" Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.427150 4813 scope.go:117] "RemoveContainer" containerID="c924b0d98cb3a02699a4f3bc9f32faf2ff2ade3efed234a48be9061cc5f9d438" Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.452607 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=45.654886321 podStartE2EDuration="1m17.452584585s" podCreationTimestamp="2025-10-07 19:32:43 +0000 UTC" firstStartedPulling="2025-10-07 19:32:45.697839544 +0000 UTC m=+891.776095155" lastFinishedPulling="2025-10-07 19:33:17.495537808 +0000 UTC m=+923.573793419" observedRunningTime="2025-10-07 19:34:00.441753334 +0000 UTC m=+966.520008945" watchObservedRunningTime="2025-10-07 19:34:00.452584585 +0000 UTC m=+966.530840216" Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.481662 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-6x4qp" podStartSLOduration=2.028692317 podStartE2EDuration="16.481640632s" podCreationTimestamp="2025-10-07 19:33:44 +0000 UTC" firstStartedPulling="2025-10-07 19:33:44.748744737 +0000 UTC m=+950.827000348" lastFinishedPulling="2025-10-07 19:33:59.201693042 +0000 UTC m=+965.279948663" observedRunningTime="2025-10-07 19:34:00.473429084 +0000 UTC m=+966.551684695" watchObservedRunningTime="2025-10-07 19:34:00.481640632 +0000 UTC m=+966.559896243" Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.496828 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qmzgx"] Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.507051 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-qmzgx"] Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.612875 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad9518e8-22cf-4c39-b8c6-15752d4a67c6" path="/var/lib/kubelet/pods/ad9518e8-22cf-4c39-b8c6-15752d4a67c6/volumes" Oct 07 19:34:00 crc kubenswrapper[4813]: I1007 19:34:00.613648 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" path="/var/lib/kubelet/pods/da6998c9-1d0e-45b6-9cbe-e65c2bb260ef/volumes" Oct 07 19:34:05 crc kubenswrapper[4813]: I1007 19:34:05.604518 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:34:06 crc kubenswrapper[4813]: I1007 19:34:06.460975 4813 generic.go:334] "Generic (PLEG): container finished" podID="9d72dd22-ed08-4510-9c62-d01807e11064" containerID="954463ed877a1da5bde94d44c8aa23324d076ea764cb58ad0114403ad460ad7d" exitCode=0 Oct 07 19:34:06 crc kubenswrapper[4813]: I1007 
19:34:06.461076 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6x4qp" event={"ID":"9d72dd22-ed08-4510-9c62-d01807e11064","Type":"ContainerDied","Data":"954463ed877a1da5bde94d44c8aa23324d076ea764cb58ad0114403ad460ad7d"} Oct 07 19:34:07 crc kubenswrapper[4813]: I1007 19:34:07.897424 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6x4qp" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.016362 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-config-data\") pod \"9d72dd22-ed08-4510-9c62-d01807e11064\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.016403 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-combined-ca-bundle\") pod \"9d72dd22-ed08-4510-9c62-d01807e11064\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.016522 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-db-sync-config-data\") pod \"9d72dd22-ed08-4510-9c62-d01807e11064\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.016544 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fk6sd\" (UniqueName: \"kubernetes.io/projected/9d72dd22-ed08-4510-9c62-d01807e11064-kube-api-access-fk6sd\") pod \"9d72dd22-ed08-4510-9c62-d01807e11064\" (UID: \"9d72dd22-ed08-4510-9c62-d01807e11064\") " Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.023137 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "9d72dd22-ed08-4510-9c62-d01807e11064" (UID: "9d72dd22-ed08-4510-9c62-d01807e11064"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.026488 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d72dd22-ed08-4510-9c62-d01807e11064-kube-api-access-fk6sd" (OuterVolumeSpecName: "kube-api-access-fk6sd") pod "9d72dd22-ed08-4510-9c62-d01807e11064" (UID: "9d72dd22-ed08-4510-9c62-d01807e11064"). InnerVolumeSpecName "kube-api-access-fk6sd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.048443 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d72dd22-ed08-4510-9c62-d01807e11064" (UID: "9d72dd22-ed08-4510-9c62-d01807e11064"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.063457 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-config-data" (OuterVolumeSpecName: "config-data") pod "9d72dd22-ed08-4510-9c62-d01807e11064" (UID: "9d72dd22-ed08-4510-9c62-d01807e11064"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.117864 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.117889 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.117900 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9d72dd22-ed08-4510-9c62-d01807e11064-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.117909 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fk6sd\" (UniqueName: \"kubernetes.io/projected/9d72dd22-ed08-4510-9c62-d01807e11064-kube-api-access-fk6sd\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.476246 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6x4qp" event={"ID":"9d72dd22-ed08-4510-9c62-d01807e11064","Type":"ContainerDied","Data":"10753472e2c37a808d3008f7975a8eb270ba0cea9e38746ba0538616a9d6a914"} Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.476294 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10753472e2c37a808d3008f7975a8eb270ba0cea9e38746ba0538616a9d6a914" Oct 07 19:34:08 crc kubenswrapper[4813]: I1007 19:34:08.476358 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-6x4qp" Oct 07 19:34:08 crc kubenswrapper[4813]: E1007 19:34:08.665945 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d72dd22_ed08_4510_9c62_d01807e11064.slice/crio-10753472e2c37a808d3008f7975a8eb270ba0cea9e38746ba0538616a9d6a914\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d72dd22_ed08_4510_9c62_d01807e11064.slice\": RecentStats: unable to find data in memory cache]" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.030621 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-vsctw"] Oct 07 19:34:09 crc kubenswrapper[4813]: E1007 19:34:09.031169 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9518e8-22cf-4c39-b8c6-15752d4a67c6" containerName="ovn-config" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031181 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9518e8-22cf-4c39-b8c6-15752d4a67c6" containerName="ovn-config" Oct 07 19:34:09 crc kubenswrapper[4813]: E1007 19:34:09.031193 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" containerName="dnsmasq-dns" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031198 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" containerName="dnsmasq-dns" Oct 07 19:34:09 crc kubenswrapper[4813]: E1007 19:34:09.031210 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d72dd22-ed08-4510-9c62-d01807e11064" containerName="glance-db-sync" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031215 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d72dd22-ed08-4510-9c62-d01807e11064" containerName="glance-db-sync" Oct 07 19:34:09 crc kubenswrapper[4813]: E1007 19:34:09.031229 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af27a525-e196-45c9-bf20-7bce89dfae1f" containerName="mariadb-account-create" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031235 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="af27a525-e196-45c9-bf20-7bce89dfae1f" containerName="mariadb-account-create" Oct 07 19:34:09 crc kubenswrapper[4813]: E1007 19:34:09.031277 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc337fb-89a0-4837-9137-ea0d3b8f51eb" containerName="mariadb-account-create" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031282 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc337fb-89a0-4837-9137-ea0d3b8f51eb" containerName="mariadb-account-create" Oct 07 19:34:09 crc kubenswrapper[4813]: E1007 19:34:09.031304 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" containerName="init" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031309 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" containerName="init" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031466 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="efc337fb-89a0-4837-9137-ea0d3b8f51eb" containerName="mariadb-account-create" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031475 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d72dd22-ed08-4510-9c62-d01807e11064" 
containerName="glance-db-sync" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031484 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="af27a525-e196-45c9-bf20-7bce89dfae1f" containerName="mariadb-account-create" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031499 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad9518e8-22cf-4c39-b8c6-15752d4a67c6" containerName="ovn-config" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.031509 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="da6998c9-1d0e-45b6-9cbe-e65c2bb260ef" containerName="dnsmasq-dns" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.032343 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.060488 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-vsctw"] Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.132604 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.132692 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-config\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.132728 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.132757 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.132795 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.132830 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7fng\" (UniqueName: \"kubernetes.io/projected/4dfa2131-81b6-474b-aa07-08ec422fa6bd-kube-api-access-p7fng\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.233682 4813 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-config\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.233741 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.233775 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.233796 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.233832 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7fng\" (UniqueName: \"kubernetes.io/projected/4dfa2131-81b6-474b-aa07-08ec422fa6bd-kube-api-access-p7fng\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.233867 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.234796 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-config\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.234931 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.235151 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.235170 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.236078 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.264806 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7fng\" (UniqueName: \"kubernetes.io/projected/4dfa2131-81b6-474b-aa07-08ec422fa6bd-kube-api-access-p7fng\") pod \"dnsmasq-dns-7ff5475cc9-vsctw\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.359769 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:09 crc kubenswrapper[4813]: I1007 19:34:09.774997 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-vsctw"] Oct 07 19:34:10 crc kubenswrapper[4813]: I1007 19:34:10.495684 4813 generic.go:334] "Generic (PLEG): container finished" podID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerID="8b6a7195daccc096aa75681a6a63c5cfbd74902e738d147c12dad16ee4c7dcab" exitCode=0 Oct 07 19:34:10 crc kubenswrapper[4813]: I1007 19:34:10.495756 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" event={"ID":"4dfa2131-81b6-474b-aa07-08ec422fa6bd","Type":"ContainerDied","Data":"8b6a7195daccc096aa75681a6a63c5cfbd74902e738d147c12dad16ee4c7dcab"} Oct 07 19:34:10 crc kubenswrapper[4813]: I1007 19:34:10.495930 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" event={"ID":"4dfa2131-81b6-474b-aa07-08ec422fa6bd","Type":"ContainerStarted","Data":"eeb9f4eaa434de40952f0d42bdb9043eb3c1628cb005d23b589a08d114937710"} Oct 07 19:34:11 crc kubenswrapper[4813]: I1007 19:34:11.505042 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" event={"ID":"4dfa2131-81b6-474b-aa07-08ec422fa6bd","Type":"ContainerStarted","Data":"ca4bf6ed6a57a79b5fa3e35d320f82e69a95a3f2f1dac428bccd806a7cc37339"} Oct 07 19:34:11 crc kubenswrapper[4813]: I1007 19:34:11.505605 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:11 crc kubenswrapper[4813]: I1007 19:34:11.531296 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" podStartSLOduration=3.5312703 podStartE2EDuration="3.5312703s" podCreationTimestamp="2025-10-07 19:34:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:34:11.523952759 +0000 UTC m=+977.602208370" watchObservedRunningTime="2025-10-07 19:34:11.5312703 +0000 UTC m=+977.609525911" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.183452 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.479953 4813 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/cinder-db-create-886kw"] Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.481235 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-886kw" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.496437 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-886kw"] Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.550613 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz6k4\" (UniqueName: \"kubernetes.io/projected/0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d-kube-api-access-nz6k4\") pod \"cinder-db-create-886kw\" (UID: \"0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d\") " pod="openstack/cinder-db-create-886kw" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.651558 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz6k4\" (UniqueName: \"kubernetes.io/projected/0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d-kube-api-access-nz6k4\") pod \"cinder-db-create-886kw\" (UID: \"0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d\") " pod="openstack/cinder-db-create-886kw" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.690088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz6k4\" (UniqueName: \"kubernetes.io/projected/0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d-kube-api-access-nz6k4\") pod \"cinder-db-create-886kw\" (UID: \"0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d\") " pod="openstack/cinder-db-create-886kw" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.693406 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-275zb"] Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.694451 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-275zb" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.698587 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-275zb"] Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.827433 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-886kw" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.856282 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q85n\" (UniqueName: \"kubernetes.io/projected/d68c3899-a167-4624-96fa-129664c55bff-kube-api-access-6q85n\") pod \"barbican-db-create-275zb\" (UID: \"d68c3899-a167-4624-96fa-129664c55bff\") " pod="openstack/barbican-db-create-275zb" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.933779 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-2bcbb"] Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.941388 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-2bcbb" Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.943999 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-2bcbb"] Oct 07 19:34:15 crc kubenswrapper[4813]: I1007 19:34:15.958851 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q85n\" (UniqueName: \"kubernetes.io/projected/d68c3899-a167-4624-96fa-129664c55bff-kube-api-access-6q85n\") pod \"barbican-db-create-275zb\" (UID: \"d68c3899-a167-4624-96fa-129664c55bff\") " pod="openstack/barbican-db-create-275zb" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.012088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q85n\" (UniqueName: \"kubernetes.io/projected/d68c3899-a167-4624-96fa-129664c55bff-kube-api-access-6q85n\") pod \"barbican-db-create-275zb\" (UID: \"d68c3899-a167-4624-96fa-129664c55bff\") " pod="openstack/barbican-db-create-275zb" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.044687 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-275zb" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.066256 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpk28\" (UniqueName: \"kubernetes.io/projected/7dbca003-4721-4826-ba32-c996b89f1068-kube-api-access-bpk28\") pod \"neutron-db-create-2bcbb\" (UID: \"7dbca003-4721-4826-ba32-c996b89f1068\") " pod="openstack/neutron-db-create-2bcbb" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.074676 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-xbcqc"] Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.076402 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.080642 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.080827 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.082981 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gbnk5" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.096797 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.134838 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xbcqc"] Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.168313 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpk28\" (UniqueName: \"kubernetes.io/projected/7dbca003-4721-4826-ba32-c996b89f1068-kube-api-access-bpk28\") pod \"neutron-db-create-2bcbb\" (UID: \"7dbca003-4721-4826-ba32-c996b89f1068\") " pod="openstack/neutron-db-create-2bcbb" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.168419 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp8g8\" (UniqueName: \"kubernetes.io/projected/b61b5e30-f563-4f0c-9578-4953d831ffb9-kube-api-access-bp8g8\") pod \"keystone-db-sync-xbcqc\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.168452 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-config-data\") pod \"keystone-db-sync-xbcqc\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.168474 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-combined-ca-bundle\") pod \"keystone-db-sync-xbcqc\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.215105 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpk28\" (UniqueName: \"kubernetes.io/projected/7dbca003-4721-4826-ba32-c996b89f1068-kube-api-access-bpk28\") pod \"neutron-db-create-2bcbb\" (UID: \"7dbca003-4721-4826-ba32-c996b89f1068\") " pod="openstack/neutron-db-create-2bcbb" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.271691 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp8g8\" (UniqueName: \"kubernetes.io/projected/b61b5e30-f563-4f0c-9578-4953d831ffb9-kube-api-access-bp8g8\") pod \"keystone-db-sync-xbcqc\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.271995 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-config-data\") pod \"keystone-db-sync-xbcqc\" (UID: 
\"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.272029 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-combined-ca-bundle\") pod \"keystone-db-sync-xbcqc\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.276710 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-config-data\") pod \"keystone-db-sync-xbcqc\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.277230 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-combined-ca-bundle\") pod \"keystone-db-sync-xbcqc\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.300830 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp8g8\" (UniqueName: \"kubernetes.io/projected/b61b5e30-f563-4f0c-9578-4953d831ffb9-kube-api-access-bp8g8\") pod \"keystone-db-sync-xbcqc\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:16 crc kubenswrapper[4813]: I1007 19:34:16.461213 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-886kw"] Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:16.483996 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2bcbb" Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:16.484451 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:16.527196 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-275zb"] Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:16.552469 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-886kw" event={"ID":"0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d","Type":"ContainerStarted","Data":"1730e090d296cf978dadd026ab98c8d3a2f3db76896a13d2303b482c4e164f24"} Oct 07 19:34:17 crc kubenswrapper[4813]: W1007 19:34:16.562756 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd68c3899_a167_4624_96fa_129664c55bff.slice/crio-278238ba28c69c9a7d28c3b89dd28a095fe30e543236d535071f157568aec272 WatchSource:0}: Error finding container 278238ba28c69c9a7d28c3b89dd28a095fe30e543236d535071f157568aec272: Status 404 returned error can't find the container with id 278238ba28c69c9a7d28c3b89dd28a095fe30e543236d535071f157568aec272 Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:17.525721 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-2bcbb"] Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:17.533664 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xbcqc"] Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:17.564288 4813 generic.go:334] "Generic (PLEG): container finished" podID="0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d" containerID="3bb9be13cd675da54f97286f00f03d0b025d6dd8a865eaa1b9553b9db71dc7fe" exitCode=0 Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:17.564489 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-886kw" event={"ID":"0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d","Type":"ContainerDied","Data":"3bb9be13cd675da54f97286f00f03d0b025d6dd8a865eaa1b9553b9db71dc7fe"} Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:17.570212 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2bcbb" event={"ID":"7dbca003-4721-4826-ba32-c996b89f1068","Type":"ContainerStarted","Data":"d1392bca1fe1a109b83fbba08b38cee6188d08a13fcef5a966701bb24d9cbd95"} Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:17.572389 4813 generic.go:334] "Generic (PLEG): container finished" podID="d68c3899-a167-4624-96fa-129664c55bff" containerID="f9cf75cc060d2be879a21134417d2227dca3b23828d8148054a09752fc97d512" exitCode=0 Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:17.572474 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-275zb" event={"ID":"d68c3899-a167-4624-96fa-129664c55bff","Type":"ContainerDied","Data":"f9cf75cc060d2be879a21134417d2227dca3b23828d8148054a09752fc97d512"} Oct 07 19:34:17 crc kubenswrapper[4813]: I1007 19:34:17.572524 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-275zb" event={"ID":"d68c3899-a167-4624-96fa-129664c55bff","Type":"ContainerStarted","Data":"278238ba28c69c9a7d28c3b89dd28a095fe30e543236d535071f157568aec272"} Oct 07 19:34:18 crc kubenswrapper[4813]: I1007 19:34:18.584834 4813 generic.go:334] "Generic (PLEG): container finished" podID="7dbca003-4721-4826-ba32-c996b89f1068" containerID="e7adc0732bbee54074cab8a3e1fdf7a171500b6e73adc26878b4884927e1671e" exitCode=0 Oct 07 19:34:18 crc kubenswrapper[4813]: I1007 19:34:18.585220 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2bcbb" 
event={"ID":"7dbca003-4721-4826-ba32-c996b89f1068","Type":"ContainerDied","Data":"e7adc0732bbee54074cab8a3e1fdf7a171500b6e73adc26878b4884927e1671e"} Oct 07 19:34:18 crc kubenswrapper[4813]: I1007 19:34:18.587481 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xbcqc" event={"ID":"b61b5e30-f563-4f0c-9578-4953d831ffb9","Type":"ContainerStarted","Data":"f1a04205e2b24d3939608b4c692bbdcabed12c7e2bb020fd754c3963c42aa828"} Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.062553 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-886kw" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.087584 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-275zb" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.230984 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz6k4\" (UniqueName: \"kubernetes.io/projected/0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d-kube-api-access-nz6k4\") pod \"0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d\" (UID: \"0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d\") " Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.231374 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q85n\" (UniqueName: \"kubernetes.io/projected/d68c3899-a167-4624-96fa-129664c55bff-kube-api-access-6q85n\") pod \"d68c3899-a167-4624-96fa-129664c55bff\" (UID: \"d68c3899-a167-4624-96fa-129664c55bff\") " Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.236796 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d68c3899-a167-4624-96fa-129664c55bff-kube-api-access-6q85n" (OuterVolumeSpecName: "kube-api-access-6q85n") pod "d68c3899-a167-4624-96fa-129664c55bff" (UID: "d68c3899-a167-4624-96fa-129664c55bff"). InnerVolumeSpecName "kube-api-access-6q85n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.242009 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d-kube-api-access-nz6k4" (OuterVolumeSpecName: "kube-api-access-nz6k4") pod "0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d" (UID: "0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d"). InnerVolumeSpecName "kube-api-access-nz6k4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.334214 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz6k4\" (UniqueName: \"kubernetes.io/projected/0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d-kube-api-access-nz6k4\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.334253 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q85n\" (UniqueName: \"kubernetes.io/projected/d68c3899-a167-4624-96fa-129664c55bff-kube-api-access-6q85n\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.361570 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.446486 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-kwxf9"] Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.446734 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" podUID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" containerName="dnsmasq-dns" containerID="cri-o://e2e164c13fcc7c95481be37c3139b57bd833a269c3902fe65b00004f0b7f4671" gracePeriod=10 Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.601253 4813 generic.go:334] "Generic (PLEG): container finished" podID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" containerID="e2e164c13fcc7c95481be37c3139b57bd833a269c3902fe65b00004f0b7f4671" exitCode=0 Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.601340 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" event={"ID":"b33dcc21-d1fc-4056-8f3b-49acddf0650f","Type":"ContainerDied","Data":"e2e164c13fcc7c95481be37c3139b57bd833a269c3902fe65b00004f0b7f4671"} Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.612591 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-275zb" event={"ID":"d68c3899-a167-4624-96fa-129664c55bff","Type":"ContainerDied","Data":"278238ba28c69c9a7d28c3b89dd28a095fe30e543236d535071f157568aec272"} Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.612627 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="278238ba28c69c9a7d28c3b89dd28a095fe30e543236d535071f157568aec272" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.612673 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-275zb" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.620746 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-886kw" event={"ID":"0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d","Type":"ContainerDied","Data":"1730e090d296cf978dadd026ab98c8d3a2f3db76896a13d2303b482c4e164f24"} Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.620795 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1730e090d296cf978dadd026ab98c8d3a2f3db76896a13d2303b482c4e164f24" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.620860 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-886kw" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.883370 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" Oct 07 19:34:19 crc kubenswrapper[4813]: I1007 19:34:19.953151 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2bcbb" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.052195 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-swift-storage-0\") pod \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.052258 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gm54m\" (UniqueName: \"kubernetes.io/projected/b33dcc21-d1fc-4056-8f3b-49acddf0650f-kube-api-access-gm54m\") pod \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.052293 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-svc\") pod \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.052347 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-config\") pod \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.052455 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-sb\") pod \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.052508 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpk28\" (UniqueName: \"kubernetes.io/projected/7dbca003-4721-4826-ba32-c996b89f1068-kube-api-access-bpk28\") pod \"7dbca003-4721-4826-ba32-c996b89f1068\" (UID: \"7dbca003-4721-4826-ba32-c996b89f1068\") " Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.052529 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-nb\") pod \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\" (UID: \"b33dcc21-d1fc-4056-8f3b-49acddf0650f\") " Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.056162 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dbca003-4721-4826-ba32-c996b89f1068-kube-api-access-bpk28" (OuterVolumeSpecName: "kube-api-access-bpk28") pod "7dbca003-4721-4826-ba32-c996b89f1068" (UID: "7dbca003-4721-4826-ba32-c996b89f1068"). InnerVolumeSpecName "kube-api-access-bpk28". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.080934 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b33dcc21-d1fc-4056-8f3b-49acddf0650f-kube-api-access-gm54m" (OuterVolumeSpecName: "kube-api-access-gm54m") pod "b33dcc21-d1fc-4056-8f3b-49acddf0650f" (UID: "b33dcc21-d1fc-4056-8f3b-49acddf0650f"). InnerVolumeSpecName "kube-api-access-gm54m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.103185 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b33dcc21-d1fc-4056-8f3b-49acddf0650f" (UID: "b33dcc21-d1fc-4056-8f3b-49acddf0650f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.121356 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b33dcc21-d1fc-4056-8f3b-49acddf0650f" (UID: "b33dcc21-d1fc-4056-8f3b-49acddf0650f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.128997 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-config" (OuterVolumeSpecName: "config") pod "b33dcc21-d1fc-4056-8f3b-49acddf0650f" (UID: "b33dcc21-d1fc-4056-8f3b-49acddf0650f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.148273 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b33dcc21-d1fc-4056-8f3b-49acddf0650f" (UID: "b33dcc21-d1fc-4056-8f3b-49acddf0650f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.154754 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gm54m\" (UniqueName: \"kubernetes.io/projected/b33dcc21-d1fc-4056-8f3b-49acddf0650f-kube-api-access-gm54m\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.154785 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.154795 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.154805 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpk28\" (UniqueName: \"kubernetes.io/projected/7dbca003-4721-4826-ba32-c996b89f1068-kube-api-access-bpk28\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.154815 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.154825 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.159892 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b33dcc21-d1fc-4056-8f3b-49acddf0650f" (UID: "b33dcc21-d1fc-4056-8f3b-49acddf0650f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.256110 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b33dcc21-d1fc-4056-8f3b-49acddf0650f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.633374 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" event={"ID":"b33dcc21-d1fc-4056-8f3b-49acddf0650f","Type":"ContainerDied","Data":"0fe4618d2a470cc8a04c5e9ba5cb92a6199c483ce0ffdee4fbad527c24815816"} Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.634407 4813 scope.go:117] "RemoveContainer" containerID="e2e164c13fcc7c95481be37c3139b57bd833a269c3902fe65b00004f0b7f4671" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.633432 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-kwxf9" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.638297 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2bcbb" event={"ID":"7dbca003-4721-4826-ba32-c996b89f1068","Type":"ContainerDied","Data":"d1392bca1fe1a109b83fbba08b38cee6188d08a13fcef5a966701bb24d9cbd95"} Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.638352 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1392bca1fe1a109b83fbba08b38cee6188d08a13fcef5a966701bb24d9cbd95" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.638535 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2bcbb" Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.657461 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-kwxf9"] Oct 07 19:34:20 crc kubenswrapper[4813]: I1007 19:34:20.664098 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-kwxf9"] Oct 07 19:34:22 crc kubenswrapper[4813]: I1007 19:34:22.622413 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" path="/var/lib/kubelet/pods/b33dcc21-d1fc-4056-8f3b-49acddf0650f/volumes" Oct 07 19:34:23 crc kubenswrapper[4813]: I1007 19:34:23.065779 4813 scope.go:117] "RemoveContainer" containerID="936e5dd986f371614184c8ef5fcc173130090ec90defe42299fb8b1e66fa7287" Oct 07 19:34:23 crc kubenswrapper[4813]: I1007 19:34:23.662905 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xbcqc" event={"ID":"b61b5e30-f563-4f0c-9578-4953d831ffb9","Type":"ContainerStarted","Data":"8edb361aa611b6752b45925da9abf8aaff9d6dc46cd831bedf4f48d2029eb028"} Oct 07 19:34:23 crc kubenswrapper[4813]: I1007 19:34:23.684403 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-xbcqc" podStartSLOduration=2.073790347 podStartE2EDuration="7.684384853s" podCreationTimestamp="2025-10-07 19:34:16 +0000 UTC" firstStartedPulling="2025-10-07 19:34:17.558580095 +0000 UTC m=+983.636835706" lastFinishedPulling="2025-10-07 19:34:23.169174601 +0000 UTC m=+989.247430212" observedRunningTime="2025-10-07 19:34:23.677783302 +0000 UTC m=+989.756038913" watchObservedRunningTime="2025-10-07 19:34:23.684384853 +0000 UTC m=+989.762640474" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.748156 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-a752-account-create-zm29m"] Oct 07 19:34:25 crc kubenswrapper[4813]: E1007 19:34:25.748815 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d68c3899-a167-4624-96fa-129664c55bff" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.748831 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d68c3899-a167-4624-96fa-129664c55bff" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: E1007 19:34:25.748846 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dbca003-4721-4826-ba32-c996b89f1068" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.748854 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dbca003-4721-4826-ba32-c996b89f1068" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: E1007 19:34:25.748867 4813 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" containerName="dnsmasq-dns" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.748875 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" containerName="dnsmasq-dns" Oct 07 19:34:25 crc kubenswrapper[4813]: E1007 19:34:25.748890 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" containerName="init" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.748898 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" containerName="init" Oct 07 19:34:25 crc kubenswrapper[4813]: E1007 19:34:25.748919 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.748927 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.749151 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b33dcc21-d1fc-4056-8f3b-49acddf0650f" containerName="dnsmasq-dns" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.749166 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.749182 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d68c3899-a167-4624-96fa-129664c55bff" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.749196 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dbca003-4721-4826-ba32-c996b89f1068" containerName="mariadb-database-create" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.749808 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a752-account-create-zm29m" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.752553 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.774126 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a752-account-create-zm29m"] Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.855407 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dntcl\" (UniqueName: \"kubernetes.io/projected/aee38026-115c-4782-bdde-eadd7cd26d62-kube-api-access-dntcl\") pod \"cinder-a752-account-create-zm29m\" (UID: \"aee38026-115c-4782-bdde-eadd7cd26d62\") " pod="openstack/cinder-a752-account-create-zm29m" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.946820 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-3bbe-account-create-gjbjp"] Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.948051 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-3bbe-account-create-gjbjp" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.950549 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.957174 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dntcl\" (UniqueName: \"kubernetes.io/projected/aee38026-115c-4782-bdde-eadd7cd26d62-kube-api-access-dntcl\") pod \"cinder-a752-account-create-zm29m\" (UID: \"aee38026-115c-4782-bdde-eadd7cd26d62\") " pod="openstack/cinder-a752-account-create-zm29m" Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.961778 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-3bbe-account-create-gjbjp"] Oct 07 19:34:25 crc kubenswrapper[4813]: I1007 19:34:25.986153 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dntcl\" (UniqueName: \"kubernetes.io/projected/aee38026-115c-4782-bdde-eadd7cd26d62-kube-api-access-dntcl\") pod \"cinder-a752-account-create-zm29m\" (UID: \"aee38026-115c-4782-bdde-eadd7cd26d62\") " pod="openstack/cinder-a752-account-create-zm29m" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.058875 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j65l5\" (UniqueName: \"kubernetes.io/projected/a9fc7274-f1a7-436c-bd91-da50a3c3607b-kube-api-access-j65l5\") pod \"barbican-3bbe-account-create-gjbjp\" (UID: \"a9fc7274-f1a7-436c-bd91-da50a3c3607b\") " pod="openstack/barbican-3bbe-account-create-gjbjp" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.084466 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a752-account-create-zm29m" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.148777 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-28a4-account-create-5qhdj"] Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.150152 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-28a4-account-create-5qhdj" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.156063 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.161341 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j65l5\" (UniqueName: \"kubernetes.io/projected/a9fc7274-f1a7-436c-bd91-da50a3c3607b-kube-api-access-j65l5\") pod \"barbican-3bbe-account-create-gjbjp\" (UID: \"a9fc7274-f1a7-436c-bd91-da50a3c3607b\") " pod="openstack/barbican-3bbe-account-create-gjbjp" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.166435 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-28a4-account-create-5qhdj"] Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.187073 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j65l5\" (UniqueName: \"kubernetes.io/projected/a9fc7274-f1a7-436c-bd91-da50a3c3607b-kube-api-access-j65l5\") pod \"barbican-3bbe-account-create-gjbjp\" (UID: \"a9fc7274-f1a7-436c-bd91-da50a3c3607b\") " pod="openstack/barbican-3bbe-account-create-gjbjp" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.263198 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sgcn\" (UniqueName: \"kubernetes.io/projected/54066ef9-3488-4b4c-a9ba-ba8a6cf48df2-kube-api-access-7sgcn\") pod \"neutron-28a4-account-create-5qhdj\" (UID: \"54066ef9-3488-4b4c-a9ba-ba8a6cf48df2\") " pod="openstack/neutron-28a4-account-create-5qhdj" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.267852 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3bbe-account-create-gjbjp" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.365573 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sgcn\" (UniqueName: \"kubernetes.io/projected/54066ef9-3488-4b4c-a9ba-ba8a6cf48df2-kube-api-access-7sgcn\") pod \"neutron-28a4-account-create-5qhdj\" (UID: \"54066ef9-3488-4b4c-a9ba-ba8a6cf48df2\") " pod="openstack/neutron-28a4-account-create-5qhdj" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.385849 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sgcn\" (UniqueName: \"kubernetes.io/projected/54066ef9-3488-4b4c-a9ba-ba8a6cf48df2-kube-api-access-7sgcn\") pod \"neutron-28a4-account-create-5qhdj\" (UID: \"54066ef9-3488-4b4c-a9ba-ba8a6cf48df2\") " pod="openstack/neutron-28a4-account-create-5qhdj" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.550500 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a752-account-create-zm29m"] Oct 07 19:34:26 crc kubenswrapper[4813]: W1007 19:34:26.556848 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaee38026_115c_4782_bdde_eadd7cd26d62.slice/crio-05512ef161c1c361437dbb72c41fe040e583e06c22542c1aaee9720272ad594f WatchSource:0}: Error finding container 05512ef161c1c361437dbb72c41fe040e583e06c22542c1aaee9720272ad594f: Status 404 returned error can't find the container with id 05512ef161c1c361437dbb72c41fe040e583e06c22542c1aaee9720272ad594f Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.561543 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-28a4-account-create-5qhdj" Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.694399 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a752-account-create-zm29m" event={"ID":"aee38026-115c-4782-bdde-eadd7cd26d62","Type":"ContainerStarted","Data":"05512ef161c1c361437dbb72c41fe040e583e06c22542c1aaee9720272ad594f"} Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.726477 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-3bbe-account-create-gjbjp"] Oct 07 19:34:26 crc kubenswrapper[4813]: W1007 19:34:26.733642 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9fc7274_f1a7_436c_bd91_da50a3c3607b.slice/crio-17065118710c37a011fc37b7d3c4ce57a6724bd8ffd812659b3e5c4899a678bf WatchSource:0}: Error finding container 17065118710c37a011fc37b7d3c4ce57a6724bd8ffd812659b3e5c4899a678bf: Status 404 returned error can't find the container with id 17065118710c37a011fc37b7d3c4ce57a6724bd8ffd812659b3e5c4899a678bf Oct 07 19:34:26 crc kubenswrapper[4813]: I1007 19:34:26.994689 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-28a4-account-create-5qhdj"] Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.713647 4813 generic.go:334] "Generic (PLEG): container finished" podID="a9fc7274-f1a7-436c-bd91-da50a3c3607b" containerID="6250123bab676b30ece4fdb536cd6d13ef1f0f3f511a7259fb7fdd1fc7cdf3d8" exitCode=0 Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.713722 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3bbe-account-create-gjbjp" event={"ID":"a9fc7274-f1a7-436c-bd91-da50a3c3607b","Type":"ContainerDied","Data":"6250123bab676b30ece4fdb536cd6d13ef1f0f3f511a7259fb7fdd1fc7cdf3d8"} Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.714099 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3bbe-account-create-gjbjp" event={"ID":"a9fc7274-f1a7-436c-bd91-da50a3c3607b","Type":"ContainerStarted","Data":"17065118710c37a011fc37b7d3c4ce57a6724bd8ffd812659b3e5c4899a678bf"} Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.718599 4813 generic.go:334] "Generic (PLEG): container finished" podID="54066ef9-3488-4b4c-a9ba-ba8a6cf48df2" containerID="284db407c3d4776dc2ba72d3a97c56fd5f9d71c83b971aca9750e3b2b311584d" exitCode=0 Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.718700 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-28a4-account-create-5qhdj" event={"ID":"54066ef9-3488-4b4c-a9ba-ba8a6cf48df2","Type":"ContainerDied","Data":"284db407c3d4776dc2ba72d3a97c56fd5f9d71c83b971aca9750e3b2b311584d"} Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.718752 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-28a4-account-create-5qhdj" event={"ID":"54066ef9-3488-4b4c-a9ba-ba8a6cf48df2","Type":"ContainerStarted","Data":"933ca8d19726233b59ad2034302850bf7b7188525dacfcb55a0c9b6df210a614"} Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.721542 4813 generic.go:334] "Generic (PLEG): container finished" podID="aee38026-115c-4782-bdde-eadd7cd26d62" containerID="c5aae9725308edf30ac4733ae22ac760492ef832926c6c7c718205bafbf94358" exitCode=0 Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.721660 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a752-account-create-zm29m" 
event={"ID":"aee38026-115c-4782-bdde-eadd7cd26d62","Type":"ContainerDied","Data":"c5aae9725308edf30ac4733ae22ac760492ef832926c6c7c718205bafbf94358"} Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.726148 4813 generic.go:334] "Generic (PLEG): container finished" podID="b61b5e30-f563-4f0c-9578-4953d831ffb9" containerID="8edb361aa611b6752b45925da9abf8aaff9d6dc46cd831bedf4f48d2029eb028" exitCode=0 Oct 07 19:34:27 crc kubenswrapper[4813]: I1007 19:34:27.726211 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xbcqc" event={"ID":"b61b5e30-f563-4f0c-9578-4953d831ffb9","Type":"ContainerDied","Data":"8edb361aa611b6752b45925da9abf8aaff9d6dc46cd831bedf4f48d2029eb028"} Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.173633 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3bbe-account-create-gjbjp" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.312447 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j65l5\" (UniqueName: \"kubernetes.io/projected/a9fc7274-f1a7-436c-bd91-da50a3c3607b-kube-api-access-j65l5\") pod \"a9fc7274-f1a7-436c-bd91-da50a3c3607b\" (UID: \"a9fc7274-f1a7-436c-bd91-da50a3c3607b\") " Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.317576 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9fc7274-f1a7-436c-bd91-da50a3c3607b-kube-api-access-j65l5" (OuterVolumeSpecName: "kube-api-access-j65l5") pod "a9fc7274-f1a7-436c-bd91-da50a3c3607b" (UID: "a9fc7274-f1a7-436c-bd91-da50a3c3607b"). InnerVolumeSpecName "kube-api-access-j65l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.317819 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.357580 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a752-account-create-zm29m" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.364203 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-28a4-account-create-5qhdj" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.414295 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sgcn\" (UniqueName: \"kubernetes.io/projected/54066ef9-3488-4b4c-a9ba-ba8a6cf48df2-kube-api-access-7sgcn\") pod \"54066ef9-3488-4b4c-a9ba-ba8a6cf48df2\" (UID: \"54066ef9-3488-4b4c-a9ba-ba8a6cf48df2\") " Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.414383 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dntcl\" (UniqueName: \"kubernetes.io/projected/aee38026-115c-4782-bdde-eadd7cd26d62-kube-api-access-dntcl\") pod \"aee38026-115c-4782-bdde-eadd7cd26d62\" (UID: \"aee38026-115c-4782-bdde-eadd7cd26d62\") " Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.414416 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bp8g8\" (UniqueName: \"kubernetes.io/projected/b61b5e30-f563-4f0c-9578-4953d831ffb9-kube-api-access-bp8g8\") pod \"b61b5e30-f563-4f0c-9578-4953d831ffb9\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.414472 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-config-data\") pod \"b61b5e30-f563-4f0c-9578-4953d831ffb9\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.414528 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-combined-ca-bundle\") pod \"b61b5e30-f563-4f0c-9578-4953d831ffb9\" (UID: \"b61b5e30-f563-4f0c-9578-4953d831ffb9\") " Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.414821 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j65l5\" (UniqueName: \"kubernetes.io/projected/a9fc7274-f1a7-436c-bd91-da50a3c3607b-kube-api-access-j65l5\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.417983 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b61b5e30-f563-4f0c-9578-4953d831ffb9-kube-api-access-bp8g8" (OuterVolumeSpecName: "kube-api-access-bp8g8") pod "b61b5e30-f563-4f0c-9578-4953d831ffb9" (UID: "b61b5e30-f563-4f0c-9578-4953d831ffb9"). InnerVolumeSpecName "kube-api-access-bp8g8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.418188 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee38026-115c-4782-bdde-eadd7cd26d62-kube-api-access-dntcl" (OuterVolumeSpecName: "kube-api-access-dntcl") pod "aee38026-115c-4782-bdde-eadd7cd26d62" (UID: "aee38026-115c-4782-bdde-eadd7cd26d62"). InnerVolumeSpecName "kube-api-access-dntcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.418564 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54066ef9-3488-4b4c-a9ba-ba8a6cf48df2-kube-api-access-7sgcn" (OuterVolumeSpecName: "kube-api-access-7sgcn") pod "54066ef9-3488-4b4c-a9ba-ba8a6cf48df2" (UID: "54066ef9-3488-4b4c-a9ba-ba8a6cf48df2"). InnerVolumeSpecName "kube-api-access-7sgcn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.437708 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b61b5e30-f563-4f0c-9578-4953d831ffb9" (UID: "b61b5e30-f563-4f0c-9578-4953d831ffb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.453481 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-config-data" (OuterVolumeSpecName: "config-data") pod "b61b5e30-f563-4f0c-9578-4953d831ffb9" (UID: "b61b5e30-f563-4f0c-9578-4953d831ffb9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.517313 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dntcl\" (UniqueName: \"kubernetes.io/projected/aee38026-115c-4782-bdde-eadd7cd26d62-kube-api-access-dntcl\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.517392 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bp8g8\" (UniqueName: \"kubernetes.io/projected/b61b5e30-f563-4f0c-9578-4953d831ffb9-kube-api-access-bp8g8\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.517415 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.517432 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b61b5e30-f563-4f0c-9578-4953d831ffb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.517450 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sgcn\" (UniqueName: \"kubernetes.io/projected/54066ef9-3488-4b4c-a9ba-ba8a6cf48df2-kube-api-access-7sgcn\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.746251 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xbcqc" event={"ID":"b61b5e30-f563-4f0c-9578-4953d831ffb9","Type":"ContainerDied","Data":"f1a04205e2b24d3939608b4c692bbdcabed12c7e2bb020fd754c3963c42aa828"} Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.746290 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1a04205e2b24d3939608b4c692bbdcabed12c7e2bb020fd754c3963c42aa828" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.746303 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xbcqc" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.748207 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3bbe-account-create-gjbjp" event={"ID":"a9fc7274-f1a7-436c-bd91-da50a3c3607b","Type":"ContainerDied","Data":"17065118710c37a011fc37b7d3c4ce57a6724bd8ffd812659b3e5c4899a678bf"} Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.748218 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-3bbe-account-create-gjbjp" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.748481 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17065118710c37a011fc37b7d3c4ce57a6724bd8ffd812659b3e5c4899a678bf" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.749848 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-28a4-account-create-5qhdj" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.749869 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-28a4-account-create-5qhdj" event={"ID":"54066ef9-3488-4b4c-a9ba-ba8a6cf48df2","Type":"ContainerDied","Data":"933ca8d19726233b59ad2034302850bf7b7188525dacfcb55a0c9b6df210a614"} Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.749894 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="933ca8d19726233b59ad2034302850bf7b7188525dacfcb55a0c9b6df210a614" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.754806 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a752-account-create-zm29m" event={"ID":"aee38026-115c-4782-bdde-eadd7cd26d62","Type":"ContainerDied","Data":"05512ef161c1c361437dbb72c41fe040e583e06c22542c1aaee9720272ad594f"} Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.755158 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05512ef161c1c361437dbb72c41fe040e583e06c22542c1aaee9720272ad594f" Oct 07 19:34:29 crc kubenswrapper[4813]: I1007 19:34:29.755802 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a752-account-create-zm29m" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.012967 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-btwjb"] Oct 07 19:34:30 crc kubenswrapper[4813]: E1007 19:34:30.013289 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b61b5e30-f563-4f0c-9578-4953d831ffb9" containerName="keystone-db-sync" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.013305 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b61b5e30-f563-4f0c-9578-4953d831ffb9" containerName="keystone-db-sync" Oct 07 19:34:30 crc kubenswrapper[4813]: E1007 19:34:30.013335 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9fc7274-f1a7-436c-bd91-da50a3c3607b" containerName="mariadb-account-create" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.013341 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9fc7274-f1a7-436c-bd91-da50a3c3607b" containerName="mariadb-account-create" Oct 07 19:34:30 crc kubenswrapper[4813]: E1007 19:34:30.013358 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54066ef9-3488-4b4c-a9ba-ba8a6cf48df2" containerName="mariadb-account-create" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.013366 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="54066ef9-3488-4b4c-a9ba-ba8a6cf48df2" containerName="mariadb-account-create" Oct 07 19:34:30 crc kubenswrapper[4813]: E1007 19:34:30.013377 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee38026-115c-4782-bdde-eadd7cd26d62" containerName="mariadb-account-create" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.013383 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee38026-115c-4782-bdde-eadd7cd26d62" containerName="mariadb-account-create" Oct 07 
19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.013524 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9fc7274-f1a7-436c-bd91-da50a3c3607b" containerName="mariadb-account-create" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.013542 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="54066ef9-3488-4b4c-a9ba-ba8a6cf48df2" containerName="mariadb-account-create" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.013553 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b61b5e30-f563-4f0c-9578-4953d831ffb9" containerName="keystone-db-sync" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.013564 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee38026-115c-4782-bdde-eadd7cd26d62" containerName="mariadb-account-create" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.024007 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.045484 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-btwjb"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.057845 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-2xzlv"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.058842 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.069469 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.070452 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.078382 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gbnk5" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.078707 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.100251 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2xzlv"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.126610 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-fernet-keys\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.126925 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.126953 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzgdn\" (UniqueName: \"kubernetes.io/projected/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-kube-api-access-pzgdn\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 
19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.126982 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.127003 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-credential-keys\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.127033 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-scripts\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.127050 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.127109 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4g47\" (UniqueName: \"kubernetes.io/projected/ec6f0ebc-40e5-42bf-afde-c471da0e532a-kube-api-access-k4g47\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.127133 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.127150 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-config\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.127188 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-combined-ca-bundle\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.127208 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-config-data\") pod \"keystone-bootstrap-2xzlv\" (UID: 
\"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229451 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-config\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229520 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-combined-ca-bundle\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229539 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-config-data\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229565 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-fernet-keys\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229579 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229597 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzgdn\" (UniqueName: \"kubernetes.io/projected/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-kube-api-access-pzgdn\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229621 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229638 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-credential-keys\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229664 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-scripts\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229690 
4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229773 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4g47\" (UniqueName: \"kubernetes.io/projected/ec6f0ebc-40e5-42bf-afde-c471da0e532a-kube-api-access-k4g47\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.229807 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.230715 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.230938 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-config\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.231256 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.231842 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.231875 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.237999 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-fernet-keys\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.239929 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-credential-keys\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.245375 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-config-data\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.247952 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-combined-ca-bundle\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.248195 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-scripts\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.301025 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.303649 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.320469 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-f99dc5487-btwgz"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.321721 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.326793 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.327439 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.338761 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.338960 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.339118 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-h56dp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.339257 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.354392 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzgdn\" (UniqueName: \"kubernetes.io/projected/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-kube-api-access-pzgdn\") pod \"keystone-bootstrap-2xzlv\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.363402 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-f99dc5487-btwgz"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.373062 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4g47\" (UniqueName: \"kubernetes.io/projected/ec6f0ebc-40e5-42bf-afde-c471da0e532a-kube-api-access-k4g47\") pod \"dnsmasq-dns-5c5cc7c5ff-btwjb\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.391555 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.401297 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438205 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61eec647-dfb7-4ff8-b143-3823f9aae7bb-logs\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438255 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqgxp\" (UniqueName: \"kubernetes.io/projected/d3625b48-fada-4ec5-a62b-4ec51555f5b3-kube-api-access-jqgxp\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438281 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-config-data\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438350 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-log-httpd\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438387 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-config-data\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438405 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltknd\" (UniqueName: \"kubernetes.io/projected/61eec647-dfb7-4ff8-b143-3823f9aae7bb-kube-api-access-ltknd\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438421 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-run-httpd\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438435 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-scripts\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438465 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438485 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61eec647-dfb7-4ff8-b143-3823f9aae7bb-horizon-secret-key\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438502 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-scripts\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.438533 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.439166 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542495 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-log-httpd\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542750 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-config-data\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542772 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltknd\" (UniqueName: \"kubernetes.io/projected/61eec647-dfb7-4ff8-b143-3823f9aae7bb-kube-api-access-ltknd\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542788 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-run-httpd\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542802 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-scripts\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542829 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542850 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61eec647-dfb7-4ff8-b143-3823f9aae7bb-horizon-secret-key\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542867 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-scripts\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542895 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542913 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61eec647-dfb7-4ff8-b143-3823f9aae7bb-logs\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542928 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqgxp\" (UniqueName: \"kubernetes.io/projected/d3625b48-fada-4ec5-a62b-4ec51555f5b3-kube-api-access-jqgxp\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.542952 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-config-data\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.547284 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-log-httpd\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.548941 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.549203 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-run-httpd\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.550101 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-config-data\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.551186 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61eec647-dfb7-4ff8-b143-3823f9aae7bb-logs\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.551862 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-scripts\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.566478 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61eec647-dfb7-4ff8-b143-3823f9aae7bb-horizon-secret-key\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.566988 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-config-data\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.568506 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-scripts\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.582031 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.596474 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltknd\" (UniqueName: \"kubernetes.io/projected/61eec647-dfb7-4ff8-b143-3823f9aae7bb-kube-api-access-ltknd\") pod \"horizon-f99dc5487-btwgz\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.611479 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqgxp\" (UniqueName: \"kubernetes.io/projected/d3625b48-fada-4ec5-a62b-4ec51555f5b3-kube-api-access-jqgxp\") pod \"ceilometer-0\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") " pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.624207 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.627737 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.641518 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.641578 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.641807 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mjwqx" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.641839 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.641952 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.651181 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-btwjb"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.680772 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.738406 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-58vdp"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.739471 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.744624 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.745586 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.761658 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-wx9pw" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.761937 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.761978 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.762031 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.762057 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 
19:34:30.762089 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg5s2\" (UniqueName: \"kubernetes.io/projected/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-kube-api-access-tg5s2\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.762115 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-logs\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.762140 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.762166 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.762632 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.763938 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-79f8458665-fljfq"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.770737 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.794235 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-58vdp"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.834072 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-79f8458665-fljfq"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.856190 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-qlbgn"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.858150 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.869116 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-scripts\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.869163 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.869191 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-horizon-secret-key\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.869213 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg5s2\" (UniqueName: \"kubernetes.io/projected/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-kube-api-access-tg5s2\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.869240 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-logs\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.869267 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.870263 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-logs\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.871745 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.871797 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7kq6\" (UniqueName: \"kubernetes.io/projected/8f039eca-b53a-446b-b219-2b6f2d56a0b4-kube-api-access-x7kq6\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " 
pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.871867 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-config-data\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.871909 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.871923 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-scripts\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.871947 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-config-data\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.871971 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-logs\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.872000 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.872025 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f039eca-b53a-446b-b219-2b6f2d56a0b4-logs\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.872073 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v6dr\" (UniqueName: \"kubernetes.io/projected/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-kube-api-access-5v6dr\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.872102 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-combined-ca-bundle\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 
19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.872156 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.872482 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.874787 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.875239 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-qlbgn"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.882952 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.883691 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.892428 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.893861 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.909353 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg5s2\" (UniqueName: \"kubernetes.io/projected/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-kube-api-access-tg5s2\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.954574 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.957909 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.968492 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.984077 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986027 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986085 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-config-data\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986115 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-config\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986264 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-logs\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986303 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f039eca-b53a-446b-b219-2b6f2d56a0b4-logs\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986480 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v6dr\" (UniqueName: \"kubernetes.io/projected/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-kube-api-access-5v6dr\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986512 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-combined-ca-bundle\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986567 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-scripts\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986599 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbhkb\" (UniqueName: \"kubernetes.io/projected/6eacad55-fc64-4e75-b743-b106ce2d7c0d-kube-api-access-lbhkb\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986622 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-horizon-secret-key\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986655 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986688 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986739 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986767 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7kq6\" (UniqueName: \"kubernetes.io/projected/8f039eca-b53a-446b-b219-2b6f2d56a0b4-kube-api-access-x7kq6\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986814 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-config-data\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:30 crc kubenswrapper[4813]: I1007 19:34:30.986845 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-scripts\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:30.996574 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-config-data\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:30.997209 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-logs\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.001181 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.001376 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.006893 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f039eca-b53a-446b-b219-2b6f2d56a0b4-logs\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.008864 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-scripts\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.024305 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-scripts\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.044008 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-combined-ca-bundle\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.045231 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-config-data\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.069519 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7kq6\" (UniqueName: \"kubernetes.io/projected/8f039eca-b53a-446b-b219-2b6f2d56a0b4-kube-api-access-x7kq6\") pod \"placement-db-sync-58vdp\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.070529 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-horizon-secret-key\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.079852 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v6dr\" (UniqueName: 
\"kubernetes.io/projected/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-kube-api-access-5v6dr\") pod \"horizon-79f8458665-fljfq\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.110660 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-58vdp" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.111724 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152231 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl7pl\" (UniqueName: \"kubernetes.io/projected/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-kube-api-access-vl7pl\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152297 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152343 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbhkb\" (UniqueName: \"kubernetes.io/projected/6eacad55-fc64-4e75-b743-b106ce2d7c0d-kube-api-access-lbhkb\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152374 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152403 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152425 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152457 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152500 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" 
(UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152542 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-logs\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152562 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152586 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152610 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-config\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152630 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.152659 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.153945 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.154459 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.156257 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-sb\") pod 
\"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.157009 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-config\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.157080 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.254192 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl7pl\" (UniqueName: \"kubernetes.io/projected/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-kube-api-access-vl7pl\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.254250 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.254285 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.254309 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.254397 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-logs\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.254417 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.254453 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " 
pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.259921 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-logs\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.260766 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.263005 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-95b57"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.264127 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.268209 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.279714 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.282153 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jr2vz" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.282364 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.282506 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.288146 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbhkb\" (UniqueName: \"kubernetes.io/projected/6eacad55-fc64-4e75-b743-b106ce2d7c0d-kube-api-access-lbhkb\") pod \"dnsmasq-dns-8b5c85b87-qlbgn\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.289721 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.294098 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 
crc kubenswrapper[4813]: I1007 19:34:31.311819 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.340160 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-95b57"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.340514 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.349628 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl7pl\" (UniqueName: \"kubernetes.io/projected/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-kube-api-access-vl7pl\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.370139 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-combined-ca-bundle\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.370179 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vfmq\" (UniqueName: \"kubernetes.io/projected/37f756c5-2123-4e5b-9c02-f33dd061d767-kube-api-access-2vfmq\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.370225 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37f756c5-2123-4e5b-9c02-f33dd061d767-etc-machine-id\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.370269 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.370300 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-db-sync-config-data\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.370368 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-config-data\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.370394 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-scripts\") pod \"cinder-db-sync-95b57\" (UID: 
\"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.370946 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.407975 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-btwjb"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.479190 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-scripts\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.479312 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-combined-ca-bundle\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.479349 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vfmq\" (UniqueName: \"kubernetes.io/projected/37f756c5-2123-4e5b-9c02-f33dd061d767-kube-api-access-2vfmq\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.479389 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37f756c5-2123-4e5b-9c02-f33dd061d767-etc-machine-id\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.479425 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-db-sync-config-data\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.479454 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-config-data\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.486468 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37f756c5-2123-4e5b-9c02-f33dd061d767-etc-machine-id\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.488645 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-scripts\") pod \"cinder-db-sync-95b57\" (UID: 
\"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.491779 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-db-sync-config-data\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.493773 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-combined-ca-bundle\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: W1007 19:34:31.500131 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec6f0ebc_40e5_42bf_afde_c471da0e532a.slice/crio-9fc824a6bccfffa5bc8bd7a4f1c928dac1bb5cea0444f042cb3c7527192c1bad WatchSource:0}: Error finding container 9fc824a6bccfffa5bc8bd7a4f1c928dac1bb5cea0444f042cb3c7527192c1bad: Status 404 returned error can't find the container with id 9fc824a6bccfffa5bc8bd7a4f1c928dac1bb5cea0444f042cb3c7527192c1bad Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.504301 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-config-data\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.509382 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.533979 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vfmq\" (UniqueName: \"kubernetes.io/projected/37f756c5-2123-4e5b-9c02-f33dd061d767-kube-api-access-2vfmq\") pod \"cinder-db-sync-95b57\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") " pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.555267 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.570037 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2xzlv"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.690332 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-f99dc5487-btwgz"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.717552 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-95b57" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.758403 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-7b94g"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.759514 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.763781 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.763936 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nz29z" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.791179 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7b94g"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.795024 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-combined-ca-bundle\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.795161 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-db-sync-config-data\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.795273 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd5z9\" (UniqueName: \"kubernetes.io/projected/7aa4a319-f846-4044-a663-c75e35168316-kube-api-access-jd5z9\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.872658 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2xzlv" event={"ID":"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c","Type":"ContainerStarted","Data":"625e2e4e47644a8c5997ef3aa53f81595d9f483beac0cc70e8f17656b47c99ef"} Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.880478 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-wc2qf"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.881708 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.883683 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f99dc5487-btwgz" event={"ID":"61eec647-dfb7-4ff8-b143-3823f9aae7bb","Type":"ContainerStarted","Data":"3d230b1ce266e2a3c2a3ab6e9e7cd734d292d931c760eb406e39c40b643a54e4"} Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.895035 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-flr6f" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.896309 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-db-sync-config-data\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.896412 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd5z9\" (UniqueName: \"kubernetes.io/projected/7aa4a319-f846-4044-a663-c75e35168316-kube-api-access-jd5z9\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.896485 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-combined-ca-bundle\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.901635 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.901775 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.904453 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wc2qf"] Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.915231 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-combined-ca-bundle\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.922522 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-db-sync-config-data\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.933936 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd5z9\" (UniqueName: \"kubernetes.io/projected/7aa4a319-f846-4044-a663-c75e35168316-kube-api-access-jd5z9\") pod \"barbican-db-sync-7b94g\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") " pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:31 crc kubenswrapper[4813]: I1007 19:34:31.960654 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" 
event={"ID":"ec6f0ebc-40e5-42bf-afde-c471da0e532a","Type":"ContainerStarted","Data":"9fc824a6bccfffa5bc8bd7a4f1c928dac1bb5cea0444f042cb3c7527192c1bad"} Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.000335 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxnzk\" (UniqueName: \"kubernetes.io/projected/68a76fc8-778e-4878-b798-8c21827833b4-kube-api-access-vxnzk\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.000523 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-config\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.000638 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-combined-ca-bundle\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.002839 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.101452 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxnzk\" (UniqueName: \"kubernetes.io/projected/68a76fc8-778e-4878-b798-8c21827833b4-kube-api-access-vxnzk\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.101481 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-config\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.110371 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-combined-ca-bundle\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.119123 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-config\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.119173 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-combined-ca-bundle\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.133240 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-7b94g" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.143428 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxnzk\" (UniqueName: \"kubernetes.io/projected/68a76fc8-778e-4878-b798-8c21827833b4-kube-api-access-vxnzk\") pod \"neutron-db-sync-wc2qf\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.307250 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.502636 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-qlbgn"] Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.551765 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-79f8458665-fljfq"] Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.574626 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-58vdp"] Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.735451 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-95b57"] Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.746026 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.820875 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7b94g"] Oct 07 19:34:32 crc kubenswrapper[4813]: W1007 19:34:32.869718 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7aa4a319_f846_4044_a663_c75e35168316.slice/crio-aa7d6d2815fefd90b8b4295a62fd75d0653a846acc20b757169fabe26b39ab85 WatchSource:0}: Error finding container aa7d6d2815fefd90b8b4295a62fd75d0653a846acc20b757169fabe26b39ab85: Status 404 returned error can't find the container with id aa7d6d2815fefd90b8b4295a62fd75d0653a846acc20b757169fabe26b39ab85 Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.990037 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3625b48-fada-4ec5-a62b-4ec51555f5b3","Type":"ContainerStarted","Data":"7f5fa66cea70981e619d987fd4c7b05482677211bc2d98f17443a1496c45d975"} Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.991293 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7b94g" event={"ID":"7aa4a319-f846-4044-a663-c75e35168316","Type":"ContainerStarted","Data":"aa7d6d2815fefd90b8b4295a62fd75d0653a846acc20b757169fabe26b39ab85"} Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.991790 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wc2qf"] Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.993450 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79f8458665-fljfq" event={"ID":"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec","Type":"ContainerStarted","Data":"cbf3bd116af2b44b70aabdd1758bec2884ffed477f8d0d3e50d85e13c74caa49"} Oct 07 19:34:32 crc kubenswrapper[4813]: I1007 19:34:32.996721 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-58vdp" event={"ID":"8f039eca-b53a-446b-b219-2b6f2d56a0b4","Type":"ContainerStarted","Data":"50b1f8838f79de01af0298fe712f05921dd89c6f8e461bad6affe10c2fe97704"} Oct 07 19:34:32 crc kubenswrapper[4813]: 
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.001064 4813 generic.go:334] "Generic (PLEG): container finished" podID="ec6f0ebc-40e5-42bf-afde-c471da0e532a" containerID="9e8677825ba4a240a39e82ce1c36747a95916f79bd2a9908faf50a408e625776" exitCode=0
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.001105 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" event={"ID":"ec6f0ebc-40e5-42bf-afde-c471da0e532a","Type":"ContainerDied","Data":"9e8677825ba4a240a39e82ce1c36747a95916f79bd2a9908faf50a408e625776"}
Oct 07 19:34:33 crc kubenswrapper[4813]: W1007 19:34:33.002679 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68a76fc8_778e_4878_b798_8c21827833b4.slice/crio-3de30a04af052adbaa1a45a86e7d6258de60be7b0298b10b199b9fc4c06aae55 WatchSource:0}: Error finding container 3de30a04af052adbaa1a45a86e7d6258de60be7b0298b10b199b9fc4c06aae55: Status 404 returned error can't find the container with id 3de30a04af052adbaa1a45a86e7d6258de60be7b0298b10b199b9fc4c06aae55
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.005295 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-95b57" event={"ID":"37f756c5-2123-4e5b-9c02-f33dd061d767","Type":"ContainerStarted","Data":"5f1d947136e70799c8bb6061df12edda98af6466b22a107a9015e99ce6f1ea62"}
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.008701 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" event={"ID":"6eacad55-fc64-4e75-b743-b106ce2d7c0d","Type":"ContainerStarted","Data":"931bec6ada43b9372b5db5fad39551e11fe8e13191d0f37f5cdc78b3ea22e3a9"}
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.012016 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2xzlv" event={"ID":"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c","Type":"ContainerStarted","Data":"0842544f6e4f3273a498cd8fb11528a4821a5009b3ab2a12c9796bc10ff4b8f9"}
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.039995 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-2xzlv" podStartSLOduration=4.03997363 podStartE2EDuration="4.03997363s" podCreationTimestamp="2025-10-07 19:34:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:34:33.039800756 +0000 UTC m=+999.118056367" watchObservedRunningTime="2025-10-07 19:34:33.03997363 +0000 UTC m=+999.118229241"
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.368732 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.582562 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb"
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.759544 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-svc\") pod \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") "
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.759597 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-nb\") pod \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") "
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.759637 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4g47\" (UniqueName: \"kubernetes.io/projected/ec6f0ebc-40e5-42bf-afde-c471da0e532a-kube-api-access-k4g47\") pod \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") "
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.759681 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-sb\") pod \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") "
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.759739 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-config\") pod \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") "
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.759798 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-swift-storage-0\") pod \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\" (UID: \"ec6f0ebc-40e5-42bf-afde-c471da0e532a\") "
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.778207 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec6f0ebc-40e5-42bf-afde-c471da0e532a-kube-api-access-k4g47" (OuterVolumeSpecName: "kube-api-access-k4g47") pod "ec6f0ebc-40e5-42bf-afde-c471da0e532a" (UID: "ec6f0ebc-40e5-42bf-afde-c471da0e532a"). InnerVolumeSpecName "kube-api-access-k4g47". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.799625 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ec6f0ebc-40e5-42bf-afde-c471da0e532a" (UID: "ec6f0ebc-40e5-42bf-afde-c471da0e532a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.814587 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ec6f0ebc-40e5-42bf-afde-c471da0e532a" (UID: "ec6f0ebc-40e5-42bf-afde-c471da0e532a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.839362 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ec6f0ebc-40e5-42bf-afde-c471da0e532a" (UID: "ec6f0ebc-40e5-42bf-afde-c471da0e532a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.842279 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ec6f0ebc-40e5-42bf-afde-c471da0e532a" (UID: "ec6f0ebc-40e5-42bf-afde-c471da0e532a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.852091 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-config" (OuterVolumeSpecName: "config") pod "ec6f0ebc-40e5-42bf-afde-c471da0e532a" (UID: "ec6f0ebc-40e5-42bf-afde-c471da0e532a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.862242 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.862277 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.862290 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.862299 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4g47\" (UniqueName: \"kubernetes.io/projected/ec6f0ebc-40e5-42bf-afde-c471da0e532a-kube-api-access-k4g47\") on node \"crc\" DevicePath \"\""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.862309 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 07 19:34:33 crc kubenswrapper[4813]: I1007 19:34:33.862317 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6f0ebc-40e5-42bf-afde-c471da0e532a-config\") on node \"crc\" DevicePath \"\""
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.045806 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb","Type":"ContainerStarted","Data":"0c819d7466b82d6efc10456171a854369c03d404a535f6afe9802222ce0b2181"}
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.049081 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wc2qf" event={"ID":"68a76fc8-778e-4878-b798-8c21827833b4","Type":"ContainerStarted","Data":"0c3a5d4f667b23b850823ffa9053ac0bbbcc99f2832056baca7c080460401a8a"}
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.049129 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wc2qf" event={"ID":"68a76fc8-778e-4878-b798-8c21827833b4","Type":"ContainerStarted","Data":"3de30a04af052adbaa1a45a86e7d6258de60be7b0298b10b199b9fc4c06aae55"}
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.061407 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65","Type":"ContainerStarted","Data":"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0"}
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.063205 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-wc2qf" podStartSLOduration=3.063190777 podStartE2EDuration="3.063190777s" podCreationTimestamp="2025-10-07 19:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:34:34.061612924 +0000 UTC m=+1000.139868535" watchObservedRunningTime="2025-10-07 19:34:34.063190777 +0000 UTC m=+1000.141446388"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.079487 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb" event={"ID":"ec6f0ebc-40e5-42bf-afde-c471da0e532a","Type":"ContainerDied","Data":"9fc824a6bccfffa5bc8bd7a4f1c928dac1bb5cea0444f042cb3c7527192c1bad"}
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.079532 4813 scope.go:117] "RemoveContainer" containerID="9e8677825ba4a240a39e82ce1c36747a95916f79bd2a9908faf50a408e625776"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.079648 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-btwjb"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.107484 4813 generic.go:334] "Generic (PLEG): container finished" podID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerID="a834cedcd07ae4fe1a2a84cae98724a093bb751ca41de31b4d536f05055e8289" exitCode=0
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.108720 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" event={"ID":"6eacad55-fc64-4e75-b743-b106ce2d7c0d","Type":"ContainerDied","Data":"a834cedcd07ae4fe1a2a84cae98724a093bb751ca41de31b4d536f05055e8289"}
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.300869 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.351429 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-79f8458665-fljfq"]
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.406010 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-btwjb"]
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.432305 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-btwjb"]
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.454721 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7479685c4f-mx7hc"]
Oct 07 19:34:34 crc kubenswrapper[4813]: E1007 19:34:34.455127 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6f0ebc-40e5-42bf-afde-c471da0e532a" containerName="init"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.455142 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6f0ebc-40e5-42bf-afde-c471da0e532a" containerName="init"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.455348 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6f0ebc-40e5-42bf-afde-c471da0e532a" containerName="init"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.456233 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.484147 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7479685c4f-mx7hc"]
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.530679 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.550810 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.597989 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-config-data\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.598033 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-scripts\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.598066 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrblm\" (UniqueName: \"kubernetes.io/projected/1b704b49-2eba-44ec-8c8f-88801848930a-kube-api-access-vrblm\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.598215 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b704b49-2eba-44ec-8c8f-88801848930a-logs\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.598253 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1b704b49-2eba-44ec-8c8f-88801848930a-horizon-secret-key\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.674390 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec6f0ebc-40e5-42bf-afde-c471da0e532a" path="/var/lib/kubelet/pods/ec6f0ebc-40e5-42bf-afde-c471da0e532a/volumes"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.704295 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b704b49-2eba-44ec-8c8f-88801848930a-logs\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.704600 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1b704b49-2eba-44ec-8c8f-88801848930a-horizon-secret-key\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.704722 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-config-data\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.704808 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-scripts\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.704877 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrblm\" (UniqueName: \"kubernetes.io/projected/1b704b49-2eba-44ec-8c8f-88801848930a-kube-api-access-vrblm\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.705583 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b704b49-2eba-44ec-8c8f-88801848930a-logs\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.706134 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-scripts\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.709729 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-config-data\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.712686 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1b704b49-2eba-44ec-8c8f-88801848930a-horizon-secret-key\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.736968 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrblm\" (UniqueName: \"kubernetes.io/projected/1b704b49-2eba-44ec-8c8f-88801848930a-kube-api-access-vrblm\") pod \"horizon-7479685c4f-mx7hc\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") " pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:34 crc kubenswrapper[4813]: I1007 19:34:34.830467 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:34:35 crc kubenswrapper[4813]: I1007 19:34:35.264610 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" event={"ID":"6eacad55-fc64-4e75-b743-b106ce2d7c0d","Type":"ContainerStarted","Data":"6421289613ac9e634579a0cb4f21c786241acc6bf2daa80ad64e96f5fbd39467"}
Oct 07 19:34:35 crc kubenswrapper[4813]: I1007 19:34:35.265306 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn"
Oct 07 19:34:35 crc kubenswrapper[4813]: I1007 19:34:35.291101 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" podStartSLOduration=5.291084017 podStartE2EDuration="5.291084017s" podCreationTimestamp="2025-10-07 19:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:34:35.28901657 +0000 UTC m=+1001.367272171" watchObservedRunningTime="2025-10-07 19:34:35.291084017 +0000 UTC m=+1001.369339628"
Oct 07 19:34:35 crc kubenswrapper[4813]: I1007 19:34:35.297185 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb","Type":"ContainerStarted","Data":"2d918d88af6c606a63b9eae8ddb6d49ec038db52c1498d3d3e963ba4af915986"}
Oct 07 19:34:35 crc kubenswrapper[4813]: I1007 19:34:35.512364 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7479685c4f-mx7hc"]
Oct 07 19:34:36 crc kubenswrapper[4813]: I1007 19:34:36.313647 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb","Type":"ContainerStarted","Data":"75a7da1e0d96f5b0fb273f0e9e5f077a513a8a5c6bcbb5d07915cda09c754e08"}
Oct 07 19:34:36 crc kubenswrapper[4813]: I1007 19:34:36.316718 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7479685c4f-mx7hc" event={"ID":"1b704b49-2eba-44ec-8c8f-88801848930a","Type":"ContainerStarted","Data":"1967fe3fcc85c1fbe3889c2bc8d42bb991085e5a981513d64ab3c61d88725d15"}
Oct 07 19:34:36 crc kubenswrapper[4813]: I1007 19:34:36.321452 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65","Type":"ContainerStarted","Data":"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50"}
Oct 07 19:34:36 crc kubenswrapper[4813]: I1007 19:34:36.322702 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerName="glance-log" containerID="cri-o://39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0" gracePeriod=30
Oct 07 19:34:36 crc kubenswrapper[4813]: I1007 19:34:36.322930 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerName="glance-httpd" containerID="cri-o://3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50" gracePeriod=30
Oct 07 19:34:36 crc kubenswrapper[4813]: I1007 19:34:36.344426 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.344408169 podStartE2EDuration="6.344408169s" podCreationTimestamp="2025-10-07 19:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:34:36.338771114 +0000 UTC m=+1002.417026725" watchObservedRunningTime="2025-10-07 19:34:36.344408169 +0000 UTC m=+1002.422663780"
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:34:36.338771114 +0000 UTC m=+1002.417026725" watchObservedRunningTime="2025-10-07 19:34:36.344408169 +0000 UTC m=+1002.422663780" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.178545 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.267857 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl7pl\" (UniqueName: \"kubernetes.io/projected/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-kube-api-access-vl7pl\") pod \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.268220 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-scripts\") pod \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.268305 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-config-data\") pod \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.268366 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.268406 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-logs\") pod \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.268443 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-internal-tls-certs\") pod \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.268463 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-combined-ca-bundle\") pod \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.268508 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-httpd-run\") pod \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\" (UID: \"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65\") " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.269252 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-logs" (OuterVolumeSpecName: "logs") pod "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" (UID: 
"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.269489 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" (UID: "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.270622 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.270647 4813 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.296616 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" (UID: "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.296743 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-scripts" (OuterVolumeSpecName: "scripts") pod "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" (UID: "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.296751 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-kube-api-access-vl7pl" (OuterVolumeSpecName: "kube-api-access-vl7pl") pod "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" (UID: "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65"). InnerVolumeSpecName "kube-api-access-vl7pl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.313215 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" (UID: "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.326225 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" (UID: "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.333680 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-config-data" (OuterVolumeSpecName: "config-data") pod "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" (UID: "5aa1eb7a-da80-4b99-9a0f-d5da4e348e65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.336757 4813 generic.go:334] "Generic (PLEG): container finished" podID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerID="3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50" exitCode=0 Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.336791 4813 generic.go:334] "Generic (PLEG): container finished" podID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerID="39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0" exitCode=143 Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.336831 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.336857 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65","Type":"ContainerDied","Data":"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50"} Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.336914 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65","Type":"ContainerDied","Data":"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0"} Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.336925 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5aa1eb7a-da80-4b99-9a0f-d5da4e348e65","Type":"ContainerDied","Data":"0625a1b2719aad8d0304a3a84c3ab0e0b5c5564c6a4ac383b58c14bd6575658a"} Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.336940 4813 scope.go:117] "RemoveContainer" containerID="3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.337348 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerName="glance-log" containerID="cri-o://2d918d88af6c606a63b9eae8ddb6d49ec038db52c1498d3d3e963ba4af915986" gracePeriod=30 Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.337387 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerName="glance-httpd" containerID="cri-o://75a7da1e0d96f5b0fb273f0e9e5f077a513a8a5c6bcbb5d07915cda09c754e08" gracePeriod=30 Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.381208 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl7pl\" (UniqueName: \"kubernetes.io/projected/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-kube-api-access-vl7pl\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.381252 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-scripts\") on node \"crc\" DevicePath 
\"\"" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.381264 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.381299 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.381333 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.381344 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.387641 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.387603884 podStartE2EDuration="7.387603884s" podCreationTimestamp="2025-10-07 19:34:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:34:37.368765657 +0000 UTC m=+1003.447021268" watchObservedRunningTime="2025-10-07 19:34:37.387603884 +0000 UTC m=+1003.465859485" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.400696 4813 scope.go:117] "RemoveContainer" containerID="39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.409272 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.413251 4813 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.426495 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.447342 4813 scope.go:117] "RemoveContainer" containerID="3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50" Oct 07 19:34:37 crc kubenswrapper[4813]: E1007 19:34:37.450198 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50\": container with ID starting with 3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50 not found: ID does not exist" containerID="3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.450248 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50"} err="failed to get container status \"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50\": rpc error: code = NotFound desc = could not find container \"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50\": container with ID starting with 
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.450275 4813 scope.go:117] "RemoveContainer" containerID="39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0"
Oct 07 19:34:37 crc kubenswrapper[4813]: E1007 19:34:37.459163 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0\": container with ID starting with 39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0 not found: ID does not exist" containerID="39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.459288 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0"} err="failed to get container status \"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0\": rpc error: code = NotFound desc = could not find container \"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0\": container with ID starting with 39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0 not found: ID does not exist"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.459398 4813 scope.go:117] "RemoveContainer" containerID="3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.459565 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 07 19:34:37 crc kubenswrapper[4813]: E1007 19:34:37.460114 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerName="glance-log"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.460205 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerName="glance-log"
Oct 07 19:34:37 crc kubenswrapper[4813]: E1007 19:34:37.460295 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerName="glance-httpd"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.460385 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerName="glance-httpd"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.460655 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerName="glance-log"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.460744 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" containerName="glance-httpd"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.461895 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.469875 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.470056 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.471755 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.472689 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50"} err="failed to get container status \"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50\": rpc error: code = NotFound desc = could not find container \"3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50\": container with ID starting with 3b0fc6931e4199e406c8b0c62c6dd772039d886cad6f99b6aca940808b56fb50 not found: ID does not exist"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.472720 4813 scope.go:117] "RemoveContainer" containerID="39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.474389 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0"} err="failed to get container status \"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0\": rpc error: code = NotFound desc = could not find container \"39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0\": container with ID starting with 39e94f9de2c6bf571ce31432bbaebf44f4813ea5b8aba82e5392140050d0a2a0 not found: ID does not exist"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.484009 4813 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.587933 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-scripts\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.588028 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn9fr\" (UniqueName: \"kubernetes.io/projected/146f3f20-fac7-4547-852e-dff6fde2f507-kube-api-access-zn9fr\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.588106 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.588240 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-logs\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.588309 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.588398 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.588453 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.588535 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-config-data\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.689639 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.689689 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.689712 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.689746 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-config-data\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.689799 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-scripts\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.689870 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn9fr\" (UniqueName: \"kubernetes.io/projected/146f3f20-fac7-4547-852e-dff6fde2f507-kube-api-access-zn9fr\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.689884 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.689917 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-logs\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.690317 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-logs\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.691193 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.691504 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.699705 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.699947 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-scripts\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.700203 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-config-data\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0"
\"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-config-data\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.707071 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.725149 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.737125 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn9fr\" (UniqueName: \"kubernetes.io/projected/146f3f20-fac7-4547-852e-dff6fde2f507-kube-api-access-zn9fr\") pod \"glance-default-internal-api-0\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:34:37 crc kubenswrapper[4813]: I1007 19:34:37.829526 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:34:38 crc kubenswrapper[4813]: I1007 19:34:38.366801 4813 generic.go:334] "Generic (PLEG): container finished" podID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerID="75a7da1e0d96f5b0fb273f0e9e5f077a513a8a5c6bcbb5d07915cda09c754e08" exitCode=0 Oct 07 19:34:38 crc kubenswrapper[4813]: I1007 19:34:38.367139 4813 generic.go:334] "Generic (PLEG): container finished" podID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerID="2d918d88af6c606a63b9eae8ddb6d49ec038db52c1498d3d3e963ba4af915986" exitCode=143 Oct 07 19:34:38 crc kubenswrapper[4813]: I1007 19:34:38.366869 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb","Type":"ContainerDied","Data":"75a7da1e0d96f5b0fb273f0e9e5f077a513a8a5c6bcbb5d07915cda09c754e08"} Oct 07 19:34:38 crc kubenswrapper[4813]: I1007 19:34:38.367221 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb","Type":"ContainerDied","Data":"2d918d88af6c606a63b9eae8ddb6d49ec038db52c1498d3d3e963ba4af915986"} Oct 07 19:34:38 crc kubenswrapper[4813]: I1007 19:34:38.579056 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:34:38 crc kubenswrapper[4813]: I1007 19:34:38.634594 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aa1eb7a-da80-4b99-9a0f-d5da4e348e65" path="/var/lib/kubelet/pods/5aa1eb7a-da80-4b99-9a0f-d5da4e348e65/volumes" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.386718 4813 generic.go:334] "Generic (PLEG): container finished" podID="7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" containerID="0842544f6e4f3273a498cd8fb11528a4821a5009b3ab2a12c9796bc10ff4b8f9" exitCode=0 Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.386960 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2xzlv" 
event={"ID":"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c","Type":"ContainerDied","Data":"0842544f6e4f3273a498cd8fb11528a4821a5009b3ab2a12c9796bc10ff4b8f9"} Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.580594 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-f99dc5487-btwgz"] Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.625924 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-688984b46d-g79nd"] Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.627344 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.654822 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-688984b46d-g79nd"] Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.656718 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.733475 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7479685c4f-mx7hc"] Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.742680 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpvgv\" (UniqueName: \"kubernetes.io/projected/aed6b0b2-d265-4f3f-a68b-215696e44617-kube-api-access-fpvgv\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.742727 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-config-data\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.742783 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aed6b0b2-d265-4f3f-a68b-215696e44617-logs\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.742813 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-scripts\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.742837 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-tls-certs\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.742928 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-secret-key\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 
19:34:39.742948 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-combined-ca-bundle\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.798614 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-687ddb5b-lwwn2"] Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.800719 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.802439 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-687ddb5b-lwwn2"] Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.811652 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844045 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aed6b0b2-d265-4f3f-a68b-215696e44617-logs\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844099 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-scripts\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844123 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-tls-certs\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844179 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0b0d403-9a0c-407b-a3d4-a0db3e612092-config-data\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844197 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-horizon-secret-key\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844220 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-secret-key\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844236 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-combined-ca-bundle\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844255 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0b0d403-9a0c-407b-a3d4-a0db3e612092-logs\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844281 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpvgv\" (UniqueName: \"kubernetes.io/projected/aed6b0b2-d265-4f3f-a68b-215696e44617-kube-api-access-fpvgv\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844295 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-config-data\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844332 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztcxz\" (UniqueName: \"kubernetes.io/projected/a0b0d403-9a0c-407b-a3d4-a0db3e612092-kube-api-access-ztcxz\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844348 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-horizon-tls-certs\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844363 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-combined-ca-bundle\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844388 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0b0d403-9a0c-407b-a3d4-a0db3e612092-scripts\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.844771 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aed6b0b2-d265-4f3f-a68b-215696e44617-logs\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.845274 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-scripts\") pod 
\"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.847569 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-config-data\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.852248 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-combined-ca-bundle\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.852622 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-secret-key\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.883264 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpvgv\" (UniqueName: \"kubernetes.io/projected/aed6b0b2-d265-4f3f-a68b-215696e44617-kube-api-access-fpvgv\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.887177 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-tls-certs\") pod \"horizon-688984b46d-g79nd\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") " pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.946087 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztcxz\" (UniqueName: \"kubernetes.io/projected/a0b0d403-9a0c-407b-a3d4-a0db3e612092-kube-api-access-ztcxz\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.946375 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-horizon-tls-certs\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.946394 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-combined-ca-bundle\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.946424 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0b0d403-9a0c-407b-a3d4-a0db3e612092-scripts\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc 
kubenswrapper[4813]: I1007 19:34:39.946539 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0b0d403-9a0c-407b-a3d4-a0db3e612092-config-data\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.946557 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-horizon-secret-key\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.946588 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0b0d403-9a0c-407b-a3d4-a0db3e612092-logs\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.946980 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0b0d403-9a0c-407b-a3d4-a0db3e612092-logs\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.947669 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0b0d403-9a0c-407b-a3d4-a0db3e612092-scripts\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.948062 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0b0d403-9a0c-407b-a3d4-a0db3e612092-config-data\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.951390 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-horizon-tls-certs\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.954806 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-combined-ca-bundle\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.966070 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0b0d403-9a0c-407b-a3d4-a0db3e612092-horizon-secret-key\") pod \"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.967462 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztcxz\" (UniqueName: \"kubernetes.io/projected/a0b0d403-9a0c-407b-a3d4-a0db3e612092-kube-api-access-ztcxz\") pod 
\"horizon-687ddb5b-lwwn2\" (UID: \"a0b0d403-9a0c-407b-a3d4-a0db3e612092\") " pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:39 crc kubenswrapper[4813]: I1007 19:34:39.985855 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:34:40 crc kubenswrapper[4813]: I1007 19:34:40.122042 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:34:41 crc kubenswrapper[4813]: I1007 19:34:41.342912 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:34:41 crc kubenswrapper[4813]: I1007 19:34:41.403278 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-vsctw"] Oct 07 19:34:41 crc kubenswrapper[4813]: I1007 19:34:41.403805 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="dnsmasq-dns" containerID="cri-o://ca4bf6ed6a57a79b5fa3e35d320f82e69a95a3f2f1dac428bccd806a7cc37339" gracePeriod=10 Oct 07 19:34:42 crc kubenswrapper[4813]: I1007 19:34:42.442028 4813 generic.go:334] "Generic (PLEG): container finished" podID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerID="ca4bf6ed6a57a79b5fa3e35d320f82e69a95a3f2f1dac428bccd806a7cc37339" exitCode=0 Oct 07 19:34:42 crc kubenswrapper[4813]: I1007 19:34:42.442243 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" event={"ID":"4dfa2131-81b6-474b-aa07-08ec422fa6bd","Type":"ContainerDied","Data":"ca4bf6ed6a57a79b5fa3e35d320f82e69a95a3f2f1dac428bccd806a7cc37339"} Oct 07 19:34:44 crc kubenswrapper[4813]: I1007 19:34:44.360891 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: connect: connection refused" Oct 07 19:34:48 crc kubenswrapper[4813]: E1007 19:34:48.566716 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Oct 07 19:34:48 crc kubenswrapper[4813]: E1007 19:34:48.567181 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x7kq6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-58vdp_openstack(8f039eca-b53a-446b-b219-2b6f2d56a0b4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:34:48 crc kubenswrapper[4813]: E1007 19:34:48.568734 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-58vdp" podUID="8f039eca-b53a-446b-b219-2b6f2d56a0b4" Oct 07 19:34:48 crc kubenswrapper[4813]: W1007 19:34:48.572150 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod146f3f20_fac7_4547_852e_dff6fde2f507.slice/crio-3a84fcc2ffed0030e8200fae020c2d3878a9147e53c88c2ab5db1e20f5103e74 WatchSource:0}: Error finding container 3a84fcc2ffed0030e8200fae020c2d3878a9147e53c88c2ab5db1e20f5103e74: Status 404 returned error can't find the container with id 3a84fcc2ffed0030e8200fae020c2d3878a9147e53c88c2ab5db1e20f5103e74 Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.669559 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.810119 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.833456 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-public-tls-certs\") pod \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.833608 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-httpd-run\") pod \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.834153 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" (UID: "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.834230 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-scripts\") pod \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.834621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.834902 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-combined-ca-bundle\") pod \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.835183 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tg5s2\" (UniqueName: \"kubernetes.io/projected/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-kube-api-access-tg5s2\") pod \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.835218 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-config-data\") pod \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.835314 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-logs\") pod \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\" (UID: \"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.835931 4813 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:48 crc kubenswrapper[4813]: 
I1007 19:34:48.836136 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-logs" (OuterVolumeSpecName: "logs") pod "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" (UID: "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.846793 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-kube-api-access-tg5s2" (OuterVolumeSpecName: "kube-api-access-tg5s2") pod "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" (UID: "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb"). InnerVolumeSpecName "kube-api-access-tg5s2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.861671 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" (UID: "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.866301 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-scripts" (OuterVolumeSpecName: "scripts") pod "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" (UID: "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.880445 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" (UID: "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.883533 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" (UID: "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.927523 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-config-data" (OuterVolumeSpecName: "config-data") pod "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" (UID: "a68b9b31-5c0a-4343-ace1-34ca7f8a54eb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.937569 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-fernet-keys\") pod \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.937692 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-credential-keys\") pod \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.937739 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-scripts\") pod \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.937812 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-combined-ca-bundle\") pod \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.937954 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzgdn\" (UniqueName: \"kubernetes.io/projected/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-kube-api-access-pzgdn\") pod \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.937982 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-config-data\") pod \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\" (UID: \"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c\") " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.939112 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.939234 4813 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.939253 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.939276 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.939310 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.939670 4813 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-tg5s2\" (UniqueName: \"kubernetes.io/projected/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-kube-api-access-tg5s2\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.939691 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.941798 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-scripts" (OuterVolumeSpecName: "scripts") pod "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" (UID: "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.944106 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" (UID: "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.944384 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" (UID: "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.947517 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-kube-api-access-pzgdn" (OuterVolumeSpecName: "kube-api-access-pzgdn") pod "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" (UID: "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c"). InnerVolumeSpecName "kube-api-access-pzgdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.966796 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" (UID: "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.968051 4813 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 07 19:34:48 crc kubenswrapper[4813]: I1007 19:34:48.970473 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-config-data" (OuterVolumeSpecName: "config-data") pod "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" (UID: "7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.041848 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzgdn\" (UniqueName: \"kubernetes.io/projected/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-kube-api-access-pzgdn\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.041876 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.041885 4813 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.041893 4813 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.041901 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.041909 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.041917 4813 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 07 19:34:49 crc kubenswrapper[4813]: E1007 19:34:49.204804 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Oct 07 19:34:49 crc kubenswrapper[4813]: E1007 19:34:49.205074 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n584h57ch597hc4hf4h646h555h94h657h55bh56dh565h659h578h56h5bch58dh57dhcch66fh8chfdhbfh5b8h594h569h5cbh68fh5h54h567h5b4q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jqgxp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(d3625b48-fada-4ec5-a62b-4ec51555f5b3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.509279 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2xzlv" event={"ID":"7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c","Type":"ContainerDied","Data":"625e2e4e47644a8c5997ef3aa53f81595d9f483beac0cc70e8f17656b47c99ef"} Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.509315 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="625e2e4e47644a8c5997ef3aa53f81595d9f483beac0cc70e8f17656b47c99ef" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.509395 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2xzlv" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.514605 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"146f3f20-fac7-4547-852e-dff6fde2f507","Type":"ContainerStarted","Data":"3a84fcc2ffed0030e8200fae020c2d3878a9147e53c88c2ab5db1e20f5103e74"} Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.517097 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.517101 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a68b9b31-5c0a-4343-ace1-34ca7f8a54eb","Type":"ContainerDied","Data":"0c819d7466b82d6efc10456171a854369c03d404a535f6afe9802222ce0b2181"} Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.517170 4813 scope.go:117] "RemoveContainer" containerID="75a7da1e0d96f5b0fb273f0e9e5f077a513a8a5c6bcbb5d07915cda09c754e08" Oct 07 19:34:49 crc kubenswrapper[4813]: E1007 19:34:49.518757 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-58vdp" podUID="8f039eca-b53a-446b-b219-2b6f2d56a0b4" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.571171 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.636564 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.672870 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:34:49 crc kubenswrapper[4813]: E1007 19:34:49.673417 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerName="glance-httpd" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.673433 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerName="glance-httpd" Oct 07 19:34:49 crc kubenswrapper[4813]: E1007 19:34:49.673441 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerName="glance-log" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.673446 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerName="glance-log" Oct 07 19:34:49 crc kubenswrapper[4813]: E1007 19:34:49.673463 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" containerName="keystone-bootstrap" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.673469 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" containerName="keystone-bootstrap" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.674330 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" containerName="keystone-bootstrap" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.674358 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerName="glance-log" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.674391 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" containerName="glance-httpd" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.675588 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.678122 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.678570 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.682054 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.767290 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-config-data\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.767362 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.767435 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.767503 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-scripts\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.767550 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.767577 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.767637 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n225g\" (UniqueName: \"kubernetes.io/projected/825f92d1-f764-41bf-89ec-a0760b63ebff-kube-api-access-n225g\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.767677 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-logs\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868218 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-logs\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868334 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-config-data\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868361 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868397 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868436 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-scripts\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868483 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868520 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868543 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n225g\" (UniqueName: \"kubernetes.io/projected/825f92d1-f764-41bf-89ec-a0760b63ebff-kube-api-access-n225g\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868716 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-logs\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868830 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.868724 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.873999 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-scripts\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.875618 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.878809 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.889423 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n225g\" (UniqueName: \"kubernetes.io/projected/825f92d1-f764-41bf-89ec-a0760b63ebff-kube-api-access-n225g\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.896033 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-config-data\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.920746 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " pod="openstack/glance-default-external-api-0" Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.986781 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-2xzlv"] Oct 07 19:34:49 crc kubenswrapper[4813]: I1007 19:34:49.994645 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/keystone-bootstrap-2xzlv"] Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.003047 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.095309 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-zz7dp"] Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.096579 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.099448 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.099459 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.099460 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.099621 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gbnk5" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.133494 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zz7dp"] Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.172970 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-combined-ca-bundle\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.173281 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-config-data\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.173435 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-fernet-keys\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.173510 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-scripts\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.173530 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-credential-keys\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.173585 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmnmh\" (UniqueName: 
\"kubernetes.io/projected/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-kube-api-access-dmnmh\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.275487 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-scripts\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.275532 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-credential-keys\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.275580 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmnmh\" (UniqueName: \"kubernetes.io/projected/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-kube-api-access-dmnmh\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.275632 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-combined-ca-bundle\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.275657 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-config-data\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.275703 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-fernet-keys\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.314164 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-scripts\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.314251 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-credential-keys\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.315215 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-combined-ca-bundle\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " 
pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.353873 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmnmh\" (UniqueName: \"kubernetes.io/projected/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-kube-api-access-dmnmh\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.357042 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-config-data\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.358238 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-fernet-keys\") pod \"keystone-bootstrap-zz7dp\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.424950 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.611455 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c" path="/var/lib/kubelet/pods/7cc541b6-f5bd-4c4f-ba2d-7a9ba48fab7c/volumes" Oct 07 19:34:50 crc kubenswrapper[4813]: I1007 19:34:50.612153 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a68b9b31-5c0a-4343-ace1-34ca7f8a54eb" path="/var/lib/kubelet/pods/a68b9b31-5c0a-4343-ace1-34ca7f8a54eb/volumes" Oct 07 19:34:54 crc kubenswrapper[4813]: I1007 19:34:54.361293 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: i/o timeout" Oct 07 19:34:59 crc kubenswrapper[4813]: I1007 19:34:59.362419 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: i/o timeout" Oct 07 19:34:59 crc kubenswrapper[4813]: I1007 19:34:59.364959 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.173909 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.268451 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-nb\") pod \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.268548 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-sb\") pod \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.268631 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-svc\") pod \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.268745 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-config\") pod \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.268786 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-swift-storage-0\") pod \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.268834 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7fng\" (UniqueName: \"kubernetes.io/projected/4dfa2131-81b6-474b-aa07-08ec422fa6bd-kube-api-access-p7fng\") pod \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\" (UID: \"4dfa2131-81b6-474b-aa07-08ec422fa6bd\") " Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.288510 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dfa2131-81b6-474b-aa07-08ec422fa6bd-kube-api-access-p7fng" (OuterVolumeSpecName: "kube-api-access-p7fng") pod "4dfa2131-81b6-474b-aa07-08ec422fa6bd" (UID: "4dfa2131-81b6-474b-aa07-08ec422fa6bd"). InnerVolumeSpecName "kube-api-access-p7fng". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.320036 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4dfa2131-81b6-474b-aa07-08ec422fa6bd" (UID: "4dfa2131-81b6-474b-aa07-08ec422fa6bd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.325315 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4dfa2131-81b6-474b-aa07-08ec422fa6bd" (UID: "4dfa2131-81b6-474b-aa07-08ec422fa6bd"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.330788 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4dfa2131-81b6-474b-aa07-08ec422fa6bd" (UID: "4dfa2131-81b6-474b-aa07-08ec422fa6bd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.330865 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-config" (OuterVolumeSpecName: "config") pod "4dfa2131-81b6-474b-aa07-08ec422fa6bd" (UID: "4dfa2131-81b6-474b-aa07-08ec422fa6bd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.339101 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4dfa2131-81b6-474b-aa07-08ec422fa6bd" (UID: "4dfa2131-81b6-474b-aa07-08ec422fa6bd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.371150 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.371181 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.371190 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.371199 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.371208 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7fng\" (UniqueName: \"kubernetes.io/projected/4dfa2131-81b6-474b-aa07-08ec422fa6bd-kube-api-access-p7fng\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.371217 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dfa2131-81b6-474b-aa07-08ec422fa6bd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.637643 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" event={"ID":"4dfa2131-81b6-474b-aa07-08ec422fa6bd","Type":"ContainerDied","Data":"eeb9f4eaa434de40952f0d42bdb9043eb3c1628cb005d23b589a08d114937710"} Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.637736 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" Oct 07 19:35:01 crc kubenswrapper[4813]: E1007 19:35:01.645575 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Oct 07 19:35:01 crc kubenswrapper[4813]: E1007 19:35:01.645830 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jd5z9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-7b94g_openstack(7aa4a319-f846-4044-a663-c75e35168316): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:35:01 crc kubenswrapper[4813]: E1007 19:35:01.647063 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-7b94g" podUID="7aa4a319-f846-4044-a663-c75e35168316" Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.684262 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-vsctw"] Oct 07 19:35:01 crc kubenswrapper[4813]: I1007 19:35:01.699852 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-vsctw"] Oct 07 19:35:02 crc kubenswrapper[4813]: I1007 19:35:02.065401 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-687ddb5b-lwwn2"] Oct 07 19:35:02 crc kubenswrapper[4813]: I1007 19:35:02.615152 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" path="/var/lib/kubelet/pods/4dfa2131-81b6-474b-aa07-08ec422fa6bd/volumes" Oct 07 19:35:02 crc kubenswrapper[4813]: E1007 19:35:02.657860 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-7b94g" podUID="7aa4a319-f846-4044-a663-c75e35168316" Oct 07 19:35:04 crc kubenswrapper[4813]: I1007 19:35:04.365089 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-vsctw" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: i/o timeout" Oct 07 19:35:04 crc kubenswrapper[4813]: E1007 19:35:04.847767 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 07 19:35:04 crc kubenswrapper[4813]: E1007 19:35:04.848397 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2vfmq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-95b57_openstack(37f756c5-2123-4e5b-9c02-f33dd061d767): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 07 19:35:04 crc 
kubenswrapper[4813]: E1007 19:35:04.849659 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-95b57" podUID="37f756c5-2123-4e5b-9c02-f33dd061d767" Oct 07 19:35:04 crc kubenswrapper[4813]: I1007 19:35:04.875755 4813 scope.go:117] "RemoveContainer" containerID="2d918d88af6c606a63b9eae8ddb6d49ec038db52c1498d3d3e963ba4af915986" Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.381931 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-688984b46d-g79nd"] Oct 07 19:35:05 crc kubenswrapper[4813]: W1007 19:35:05.495584 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaed6b0b2_d265_4f3f_a68b_215696e44617.slice/crio-e23fb43d49e9fc4baa43b937116221cbe797ee6185a606bfc7317b8108f85f00 WatchSource:0}: Error finding container e23fb43d49e9fc4baa43b937116221cbe797ee6185a606bfc7317b8108f85f00: Status 404 returned error can't find the container with id e23fb43d49e9fc4baa43b937116221cbe797ee6185a606bfc7317b8108f85f00 Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.558307 4813 scope.go:117] "RemoveContainer" containerID="ca4bf6ed6a57a79b5fa3e35d320f82e69a95a3f2f1dac428bccd806a7cc37339" Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.632598 4813 scope.go:117] "RemoveContainer" containerID="8b6a7195daccc096aa75681a6a63c5cfbd74902e738d147c12dad16ee4c7dcab" Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.699963 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f99dc5487-btwgz" event={"ID":"61eec647-dfb7-4ff8-b143-3823f9aae7bb","Type":"ContainerStarted","Data":"ea25dd4c1cb8562e2993faea0512df3e7ab3b6c717dd15f1057dec7eb5d47285"} Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.701990 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79f8458665-fljfq" event={"ID":"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec","Type":"ContainerStarted","Data":"12fc7bc06ed7a26c426b2adf255acd4b79b15add7cbfae069e8a98db9772f414"} Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.703429 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-687ddb5b-lwwn2" event={"ID":"a0b0d403-9a0c-407b-a3d4-a0db3e612092","Type":"ContainerStarted","Data":"833e37c46afeb948968ba4531c4d805e4843fa287733a5b81698943e8b757a9b"} Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.704796 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688984b46d-g79nd" event={"ID":"aed6b0b2-d265-4f3f-a68b-215696e44617","Type":"ContainerStarted","Data":"e23fb43d49e9fc4baa43b937116221cbe797ee6185a606bfc7317b8108f85f00"} Oct 07 19:35:05 crc kubenswrapper[4813]: E1007 19:35:05.714848 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-95b57" podUID="37f756c5-2123-4e5b-9c02-f33dd061d767" Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.759947 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zz7dp"] Oct 07 19:35:05 crc kubenswrapper[4813]: I1007 19:35:05.975132 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:35:06 crc 
kubenswrapper[4813]: I1007 19:35:06.731410 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7479685c4f-mx7hc" event={"ID":"1b704b49-2eba-44ec-8c8f-88801848930a","Type":"ContainerStarted","Data":"1fd31cc3a878b62cedf94beb485599f7d6f255c209dc6e41ec1fd46240ffb75f"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.731634 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7479685c4f-mx7hc" event={"ID":"1b704b49-2eba-44ec-8c8f-88801848930a","Type":"ContainerStarted","Data":"c1dd9a2f324a461e87a23bab3288c44e16ae200f675369bb689f84d6202a5396"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.731758 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7479685c4f-mx7hc" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" containerName="horizon-log" containerID="cri-o://c1dd9a2f324a461e87a23bab3288c44e16ae200f675369bb689f84d6202a5396" gracePeriod=30 Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.732154 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7479685c4f-mx7hc" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" containerName="horizon" containerID="cri-o://1fd31cc3a878b62cedf94beb485599f7d6f255c209dc6e41ec1fd46240ffb75f" gracePeriod=30 Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.740720 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-58vdp" event={"ID":"8f039eca-b53a-446b-b219-2b6f2d56a0b4","Type":"ContainerStarted","Data":"d691b8fa255cb3b06af0791bdd2b36ababd7a25b7e0e52f6fd0d37720df16813"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.743547 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"825f92d1-f764-41bf-89ec-a0760b63ebff","Type":"ContainerStarted","Data":"05983812707cee45fbf3a6f60bea7bf2ff2c169c6521d9e461df7fd46720a556"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.743593 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"825f92d1-f764-41bf-89ec-a0760b63ebff","Type":"ContainerStarted","Data":"6949446b9f126f3bbc5ac54ae5b7c1363c14f074d616943d5e5783c9d3e32a85"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.749496 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-687ddb5b-lwwn2" event={"ID":"a0b0d403-9a0c-407b-a3d4-a0db3e612092","Type":"ContainerStarted","Data":"09fc8d6b8f8db537f189bf0bc2613bb56d6dafb56ae7e6bce96d93213c922ce6"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.749536 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-687ddb5b-lwwn2" event={"ID":"a0b0d403-9a0c-407b-a3d4-a0db3e612092","Type":"ContainerStarted","Data":"f15147d5ba97d46b430a9951ecc03bad5814332efd2c186214ac3459fcdbfc0f"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.752611 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"146f3f20-fac7-4547-852e-dff6fde2f507","Type":"ContainerStarted","Data":"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.754713 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3625b48-fada-4ec5-a62b-4ec51555f5b3","Type":"ContainerStarted","Data":"cf70a979e09f162083cfb6e88bb6f4202b83ebcf183a77ac4b76d3e31bfa5400"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.762446 4813 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7479685c4f-mx7hc" podStartSLOduration=2.950941706 podStartE2EDuration="32.762426608s" podCreationTimestamp="2025-10-07 19:34:34 +0000 UTC" firstStartedPulling="2025-10-07 19:34:35.530435892 +0000 UTC m=+1001.608691503" lastFinishedPulling="2025-10-07 19:35:05.341920794 +0000 UTC m=+1031.420176405" observedRunningTime="2025-10-07 19:35:06.758197062 +0000 UTC m=+1032.836452673" watchObservedRunningTime="2025-10-07 19:35:06.762426608 +0000 UTC m=+1032.840682219" Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.768709 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688984b46d-g79nd" event={"ID":"aed6b0b2-d265-4f3f-a68b-215696e44617","Type":"ContainerStarted","Data":"6ae7441f8930b87f906e801dc55dce71ed5b180dbc9b0bace9037674d73cac68"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.768748 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688984b46d-g79nd" event={"ID":"aed6b0b2-d265-4f3f-a68b-215696e44617","Type":"ContainerStarted","Data":"a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.780969 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zz7dp" event={"ID":"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4","Type":"ContainerStarted","Data":"6ffd161112faed47b3718113f640981370ac7eb1e1280fe19d1a46cadf21215e"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.781022 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zz7dp" event={"ID":"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4","Type":"ContainerStarted","Data":"a8aad640df12272fe4cbf3bfa7da59891a859e8daadae5ef11311a3f06999129"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.784959 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-58vdp" podStartSLOduration=3.81650566 podStartE2EDuration="36.784944036s" podCreationTimestamp="2025-10-07 19:34:30 +0000 UTC" firstStartedPulling="2025-10-07 19:34:32.607552969 +0000 UTC m=+998.685808570" lastFinishedPulling="2025-10-07 19:35:05.575991335 +0000 UTC m=+1031.654246946" observedRunningTime="2025-10-07 19:35:06.778214891 +0000 UTC m=+1032.856470502" watchObservedRunningTime="2025-10-07 19:35:06.784944036 +0000 UTC m=+1032.863199647" Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.794076 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f99dc5487-btwgz" event={"ID":"61eec647-dfb7-4ff8-b143-3823f9aae7bb","Type":"ContainerStarted","Data":"8d7dede48a0cd2a353036958ce41f08b5faa650bbad1e0ae80c16264077867bb"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.794205 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-f99dc5487-btwgz" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerName="horizon-log" containerID="cri-o://ea25dd4c1cb8562e2993faea0512df3e7ab3b6c717dd15f1057dec7eb5d47285" gracePeriod=30 Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.794275 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-f99dc5487-btwgz" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerName="horizon" containerID="cri-o://8d7dede48a0cd2a353036958ce41f08b5faa650bbad1e0ae80c16264077867bb" gracePeriod=30 Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.798806 4813 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/horizon-687ddb5b-lwwn2" podStartSLOduration=27.798789705 podStartE2EDuration="27.798789705s" podCreationTimestamp="2025-10-07 19:34:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:06.796172234 +0000 UTC m=+1032.874427845" watchObservedRunningTime="2025-10-07 19:35:06.798789705 +0000 UTC m=+1032.877045317" Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.803260 4813 generic.go:334] "Generic (PLEG): container finished" podID="68a76fc8-778e-4878-b798-8c21827833b4" containerID="0c3a5d4f667b23b850823ffa9053ac0bbbcc99f2832056baca7c080460401a8a" exitCode=0 Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.803317 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wc2qf" event={"ID":"68a76fc8-778e-4878-b798-8c21827833b4","Type":"ContainerDied","Data":"0c3a5d4f667b23b850823ffa9053ac0bbbcc99f2832056baca7c080460401a8a"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.818525 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79f8458665-fljfq" event={"ID":"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec","Type":"ContainerStarted","Data":"4900c9a54407705d22dcf343b7be8daf2f39acae814023879db4bba74a790488"} Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.818822 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-79f8458665-fljfq" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerName="horizon-log" containerID="cri-o://12fc7bc06ed7a26c426b2adf255acd4b79b15add7cbfae069e8a98db9772f414" gracePeriod=30 Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.818966 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-79f8458665-fljfq" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerName="horizon" containerID="cri-o://4900c9a54407705d22dcf343b7be8daf2f39acae814023879db4bba74a790488" gracePeriod=30 Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.820872 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-zz7dp" podStartSLOduration=16.820851351 podStartE2EDuration="16.820851351s" podCreationTimestamp="2025-10-07 19:34:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:06.818967239 +0000 UTC m=+1032.897222850" watchObservedRunningTime="2025-10-07 19:35:06.820851351 +0000 UTC m=+1032.899106962" Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.847107 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-f99dc5487-btwgz" podStartSLOduration=7.555505559 podStartE2EDuration="36.84708408s" podCreationTimestamp="2025-10-07 19:34:30 +0000 UTC" firstStartedPulling="2025-10-07 19:34:31.81464457 +0000 UTC m=+997.892900181" lastFinishedPulling="2025-10-07 19:35:01.106223081 +0000 UTC m=+1027.184478702" observedRunningTime="2025-10-07 19:35:06.837936929 +0000 UTC m=+1032.916192540" watchObservedRunningTime="2025-10-07 19:35:06.84708408 +0000 UTC m=+1032.925339691" Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.858111 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-688984b46d-g79nd" podStartSLOduration=27.858095942 podStartE2EDuration="27.858095942s" podCreationTimestamp="2025-10-07 19:34:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:06.856780366 +0000 UTC m=+1032.935035977" watchObservedRunningTime="2025-10-07 19:35:06.858095942 +0000 UTC m=+1032.936351553" Oct 07 19:35:06 crc kubenswrapper[4813]: I1007 19:35:06.874632 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-79f8458665-fljfq" podStartSLOduration=4.592795334 podStartE2EDuration="36.874618305s" podCreationTimestamp="2025-10-07 19:34:30 +0000 UTC" firstStartedPulling="2025-10-07 19:34:32.619889878 +0000 UTC m=+998.698145489" lastFinishedPulling="2025-10-07 19:35:04.901712809 +0000 UTC m=+1030.979968460" observedRunningTime="2025-10-07 19:35:06.873794773 +0000 UTC m=+1032.952050384" watchObservedRunningTime="2025-10-07 19:35:06.874618305 +0000 UTC m=+1032.952873916" Oct 07 19:35:07 crc kubenswrapper[4813]: I1007 19:35:07.837531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"825f92d1-f764-41bf-89ec-a0760b63ebff","Type":"ContainerStarted","Data":"0d9f0d404ce1ca7da8ab09baec87d7db50f8840239f534262bd171063d95ed82"} Oct 07 19:35:07 crc kubenswrapper[4813]: I1007 19:35:07.843466 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" containerName="glance-log" containerID="cri-o://dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294" gracePeriod=30 Oct 07 19:35:07 crc kubenswrapper[4813]: I1007 19:35:07.843541 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"146f3f20-fac7-4547-852e-dff6fde2f507","Type":"ContainerStarted","Data":"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e"} Oct 07 19:35:07 crc kubenswrapper[4813]: I1007 19:35:07.844727 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" containerName="glance-httpd" containerID="cri-o://d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e" gracePeriod=30 Oct 07 19:35:07 crc kubenswrapper[4813]: I1007 19:35:07.872775 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=18.872756363 podStartE2EDuration="18.872756363s" podCreationTimestamp="2025-10-07 19:34:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:07.855193621 +0000 UTC m=+1033.933449232" watchObservedRunningTime="2025-10-07 19:35:07.872756363 +0000 UTC m=+1033.951011974" Oct 07 19:35:07 crc kubenswrapper[4813]: I1007 19:35:07.896837 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=30.896818593 podStartE2EDuration="30.896818593s" podCreationTimestamp="2025-10-07 19:34:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:07.892087753 +0000 UTC m=+1033.970343374" watchObservedRunningTime="2025-10-07 19:35:07.896818593 +0000 UTC m=+1033.975074204" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.300600 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.329455 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxnzk\" (UniqueName: \"kubernetes.io/projected/68a76fc8-778e-4878-b798-8c21827833b4-kube-api-access-vxnzk\") pod \"68a76fc8-778e-4878-b798-8c21827833b4\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.329619 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-config\") pod \"68a76fc8-778e-4878-b798-8c21827833b4\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.329641 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-combined-ca-bundle\") pod \"68a76fc8-778e-4878-b798-8c21827833b4\" (UID: \"68a76fc8-778e-4878-b798-8c21827833b4\") " Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.337183 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68a76fc8-778e-4878-b798-8c21827833b4-kube-api-access-vxnzk" (OuterVolumeSpecName: "kube-api-access-vxnzk") pod "68a76fc8-778e-4878-b798-8c21827833b4" (UID: "68a76fc8-778e-4878-b798-8c21827833b4"). InnerVolumeSpecName "kube-api-access-vxnzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.375823 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68a76fc8-778e-4878-b798-8c21827833b4" (UID: "68a76fc8-778e-4878-b798-8c21827833b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.394928 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-config" (OuterVolumeSpecName: "config") pod "68a76fc8-778e-4878-b798-8c21827833b4" (UID: "68a76fc8-778e-4878-b798-8c21827833b4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.431511 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.431538 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a76fc8-778e-4878-b798-8c21827833b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.431563 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxnzk\" (UniqueName: \"kubernetes.io/projected/68a76fc8-778e-4878-b798-8c21827833b4-kube-api-access-vxnzk\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.825800 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.872885 4813 generic.go:334] "Generic (PLEG): container finished" podID="146f3f20-fac7-4547-852e-dff6fde2f507" containerID="d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e" exitCode=0 Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.872920 4813 generic.go:334] "Generic (PLEG): container finished" podID="146f3f20-fac7-4547-852e-dff6fde2f507" containerID="dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294" exitCode=143 Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.872972 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"146f3f20-fac7-4547-852e-dff6fde2f507","Type":"ContainerDied","Data":"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e"} Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.872997 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"146f3f20-fac7-4547-852e-dff6fde2f507","Type":"ContainerDied","Data":"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294"} Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.873009 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"146f3f20-fac7-4547-852e-dff6fde2f507","Type":"ContainerDied","Data":"3a84fcc2ffed0030e8200fae020c2d3878a9147e53c88c2ab5db1e20f5103e74"} Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.873029 4813 scope.go:117] "RemoveContainer" containerID="d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.873167 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.888576 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wc2qf" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.888817 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wc2qf" event={"ID":"68a76fc8-778e-4878-b798-8c21827833b4","Type":"ContainerDied","Data":"3de30a04af052adbaa1a45a86e7d6258de60be7b0298b10b199b9fc4c06aae55"} Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.888851 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3de30a04af052adbaa1a45a86e7d6258de60be7b0298b10b199b9fc4c06aae55" Oct 07 19:35:08 crc kubenswrapper[4813]: I1007 19:35:08.939470 4813 scope.go:117] "RemoveContainer" containerID="dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.029842 4813 scope.go:117] "RemoveContainer" containerID="d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e" Oct 07 19:35:09 crc kubenswrapper[4813]: E1007 19:35:09.034516 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e\": container with ID starting with d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e not found: ID does not exist" containerID="d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.034567 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e"} err="failed to get container status \"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e\": rpc error: code = NotFound desc = could not find container \"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e\": container with ID starting with d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e not found: ID does not exist" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.034595 4813 scope.go:117] "RemoveContainer" containerID="dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294" Oct 07 19:35:09 crc kubenswrapper[4813]: E1007 19:35:09.035782 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294\": container with ID starting with dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294 not found: ID does not exist" containerID="dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.035848 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294"} err="failed to get container status \"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294\": rpc error: code = NotFound desc = could not find container \"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294\": container with ID starting with dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294 not found: ID does not exist" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.035874 4813 scope.go:117] "RemoveContainer" containerID="d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.036436 4813 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e"} err="failed to get container status \"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e\": rpc error: code = NotFound desc = could not find container \"d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e\": container with ID starting with d6144c00d1933d759fc1d6af8fda12a735938afa1afc0daff9cde15e656ede7e not found: ID does not exist" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.036475 4813 scope.go:117] "RemoveContainer" containerID="dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.036857 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294"} err="failed to get container status \"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294\": rpc error: code = NotFound desc = could not find container \"dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294\": container with ID starting with dd88586190db57b2ebdb8a007f30b259255afa6307950c7c494e873a3da0e294 not found: ID does not exist" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.052157 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zn9fr\" (UniqueName: \"kubernetes.io/projected/146f3f20-fac7-4547-852e-dff6fde2f507-kube-api-access-zn9fr\") pod \"146f3f20-fac7-4547-852e-dff6fde2f507\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.052234 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-httpd-run\") pod \"146f3f20-fac7-4547-852e-dff6fde2f507\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.052346 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"146f3f20-fac7-4547-852e-dff6fde2f507\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.052372 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-logs\") pod \"146f3f20-fac7-4547-852e-dff6fde2f507\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.052413 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-internal-tls-certs\") pod \"146f3f20-fac7-4547-852e-dff6fde2f507\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.052437 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-config-data\") pod \"146f3f20-fac7-4547-852e-dff6fde2f507\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.052463 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-scripts\") pod 
\"146f3f20-fac7-4547-852e-dff6fde2f507\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.052504 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-combined-ca-bundle\") pod \"146f3f20-fac7-4547-852e-dff6fde2f507\" (UID: \"146f3f20-fac7-4547-852e-dff6fde2f507\") " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.053196 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-logs" (OuterVolumeSpecName: "logs") pod "146f3f20-fac7-4547-852e-dff6fde2f507" (UID: "146f3f20-fac7-4547-852e-dff6fde2f507"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.053734 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "146f3f20-fac7-4547-852e-dff6fde2f507" (UID: "146f3f20-fac7-4547-852e-dff6fde2f507"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.065576 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-scripts" (OuterVolumeSpecName: "scripts") pod "146f3f20-fac7-4547-852e-dff6fde2f507" (UID: "146f3f20-fac7-4547-852e-dff6fde2f507"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.083571 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "146f3f20-fac7-4547-852e-dff6fde2f507" (UID: "146f3f20-fac7-4547-852e-dff6fde2f507"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.083990 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/146f3f20-fac7-4547-852e-dff6fde2f507-kube-api-access-zn9fr" (OuterVolumeSpecName: "kube-api-access-zn9fr") pod "146f3f20-fac7-4547-852e-dff6fde2f507" (UID: "146f3f20-fac7-4547-852e-dff6fde2f507"). InnerVolumeSpecName "kube-api-access-zn9fr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.156818 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.156852 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.156861 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.156870 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zn9fr\" (UniqueName: \"kubernetes.io/projected/146f3f20-fac7-4547-852e-dff6fde2f507-kube-api-access-zn9fr\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.156880 4813 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/146f3f20-fac7-4547-852e-dff6fde2f507-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.250818 4813 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.268455 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "146f3f20-fac7-4547-852e-dff6fde2f507" (UID: "146f3f20-fac7-4547-852e-dff6fde2f507"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.270861 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.270895 4813 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.310543 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-rkmzm"] Oct 07 19:35:09 crc kubenswrapper[4813]: E1007 19:35:09.310987 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="init" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311006 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="init" Oct 07 19:35:09 crc kubenswrapper[4813]: E1007 19:35:09.311023 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="dnsmasq-dns" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311029 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="dnsmasq-dns" Oct 07 19:35:09 crc kubenswrapper[4813]: E1007 19:35:09.311040 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" containerName="glance-log" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311046 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" containerName="glance-log" Oct 07 19:35:09 crc kubenswrapper[4813]: E1007 19:35:09.311073 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" containerName="glance-httpd" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311079 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" containerName="glance-httpd" Oct 07 19:35:09 crc kubenswrapper[4813]: E1007 19:35:09.311090 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68a76fc8-778e-4878-b798-8c21827833b4" containerName="neutron-db-sync" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311096 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="68a76fc8-778e-4878-b798-8c21827833b4" containerName="neutron-db-sync" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311260 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" containerName="glance-httpd" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311280 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="68a76fc8-778e-4878-b798-8c21827833b4" containerName="neutron-db-sync" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311295 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" containerName="glance-log" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.311306 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dfa2131-81b6-474b-aa07-08ec422fa6bd" containerName="dnsmasq-dns" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.312215 4813 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.329391 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "146f3f20-fac7-4547-852e-dff6fde2f507" (UID: "146f3f20-fac7-4547-852e-dff6fde2f507"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.337370 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8476d947c6-lsgxz"] Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.338903 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.344111 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.344365 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.344499 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-flr6f" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.344646 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.371933 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-rkmzm"] Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.375145 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.375261 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-config\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.377627 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-config-data" (OuterVolumeSpecName: "config-data") pod "146f3f20-fac7-4547-852e-dff6fde2f507" (UID: "146f3f20-fac7-4547-852e-dff6fde2f507"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.377738 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.377851 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-combined-ca-bundle\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.377908 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.378004 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.378056 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxj22\" (UniqueName: \"kubernetes.io/projected/84915478-8abb-40dd-bb9a-4c623f742063-kube-api-access-sxj22\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.378079 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-httpd-config\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.378120 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-config\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.378146 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w76tl\" (UniqueName: \"kubernetes.io/projected/f24d064e-e0af-428b-a988-9850845b32e4-kube-api-access-w76tl\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.378231 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-ovndb-tls-certs\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.378411 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.378428 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/146f3f20-fac7-4547-852e-dff6fde2f507-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.397104 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8476d947c6-lsgxz"] Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479665 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-config\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479706 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w76tl\" (UniqueName: \"kubernetes.io/projected/f24d064e-e0af-428b-a988-9850845b32e4-kube-api-access-w76tl\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479744 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-ovndb-tls-certs\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479764 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479782 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-config\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479828 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479868 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-combined-ca-bundle\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " 
pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479889 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479924 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479946 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxj22\" (UniqueName: \"kubernetes.io/projected/84915478-8abb-40dd-bb9a-4c623f742063-kube-api-access-sxj22\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.479962 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-httpd-config\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.491578 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-httpd-config\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.497624 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-config\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.498290 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.498841 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.499224 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.500187 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.505291 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-ovndb-tls-certs\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.507134 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-config\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.507797 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-combined-ca-bundle\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.538750 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxj22\" (UniqueName: \"kubernetes.io/projected/84915478-8abb-40dd-bb9a-4c623f742063-kube-api-access-sxj22\") pod \"dnsmasq-dns-84b966f6c9-rkmzm\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.546523 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.555817 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w76tl\" (UniqueName: \"kubernetes.io/projected/f24d064e-e0af-428b-a988-9850845b32e4-kube-api-access-w76tl\") pod \"neutron-8476d947c6-lsgxz\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.568049 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.579358 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.581027 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.585873 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.586788 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.587048 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.662754 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.683782 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.683828 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.683864 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.683901 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkcvd\" (UniqueName: \"kubernetes.io/projected/8e5ad266-d270-4685-81e4-85c81a1853fb-kube-api-access-xkcvd\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.683920 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.683937 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.683970 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.684009 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.690744 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788309 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788602 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788628 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788658 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788696 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkcvd\" (UniqueName: \"kubernetes.io/projected/8e5ad266-d270-4685-81e4-85c81a1853fb-kube-api-access-xkcvd\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788715 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788732 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788771 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.788918 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 07 
19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.790959 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.791176 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.801308 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.802627 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.803240 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.804148 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.826103 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkcvd\" (UniqueName: \"kubernetes.io/projected/8e5ad266-d270-4685-81e4-85c81a1853fb-kube-api-access-xkcvd\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.845282 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.911822 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.993465 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:35:09 crc kubenswrapper[4813]: I1007 19:35:09.994481 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.006579 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.006622 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.123033 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.123184 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.143653 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.277880 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.433973 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-rkmzm"] Oct 07 19:35:10 crc kubenswrapper[4813]: W1007 19:35:10.463992 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84915478_8abb_40dd_bb9a_4c623f742063.slice/crio-d329df4b8efadeedc1d7606256f14a047fc02e384a61b47de72d22f620b7e25f WatchSource:0}: Error finding container d329df4b8efadeedc1d7606256f14a047fc02e384a61b47de72d22f620b7e25f: Status 404 returned error can't find the container with id d329df4b8efadeedc1d7606256f14a047fc02e384a61b47de72d22f620b7e25f Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.712782 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="146f3f20-fac7-4547-852e-dff6fde2f507" path="/var/lib/kubelet/pods/146f3f20-fac7-4547-852e-dff6fde2f507/volumes" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.764450 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:35:10 crc kubenswrapper[4813]: I1007 19:35:10.857547 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8476d947c6-lsgxz"] Oct 07 19:35:11 crc kubenswrapper[4813]: I1007 19:35:11.005439 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" event={"ID":"84915478-8abb-40dd-bb9a-4c623f742063","Type":"ContainerStarted","Data":"d329df4b8efadeedc1d7606256f14a047fc02e384a61b47de72d22f620b7e25f"} Oct 07 19:35:11 crc kubenswrapper[4813]: I1007 19:35:11.015806 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8476d947c6-lsgxz" event={"ID":"f24d064e-e0af-428b-a988-9850845b32e4","Type":"ContainerStarted","Data":"01691d1b5ea52b3cd4fdb7e87fb2e8ab313259741ee06d0d512fb940e2fff787"} Oct 07 19:35:11 crc kubenswrapper[4813]: I1007 19:35:11.016588 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-external-api-0" Oct 07 19:35:11 crc kubenswrapper[4813]: I1007 19:35:11.016610 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 19:35:11 crc kubenswrapper[4813]: I1007 19:35:11.075762 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:35:11 crc kubenswrapper[4813]: I1007 19:35:11.113557 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.047761 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8476d947c6-lsgxz" event={"ID":"f24d064e-e0af-428b-a988-9850845b32e4","Type":"ContainerStarted","Data":"8fd244f967228cde51b33731d7616c65680df3932fd4482af26a4a17dc661ac7"} Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.048315 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8476d947c6-lsgxz" event={"ID":"f24d064e-e0af-428b-a988-9850845b32e4","Type":"ContainerStarted","Data":"47ef3467562b47038a42a674d54ca8b537ddf4767184e9344268458fd349c955"} Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.049094 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.061710 4813 generic.go:334] "Generic (PLEG): container finished" podID="8f039eca-b53a-446b-b219-2b6f2d56a0b4" containerID="d691b8fa255cb3b06af0791bdd2b36ababd7a25b7e0e52f6fd0d37720df16813" exitCode=0 Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.061773 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-58vdp" event={"ID":"8f039eca-b53a-446b-b219-2b6f2d56a0b4","Type":"ContainerDied","Data":"d691b8fa255cb3b06af0791bdd2b36ababd7a25b7e0e52f6fd0d37720df16813"} Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.074558 4813 generic.go:334] "Generic (PLEG): container finished" podID="84915478-8abb-40dd-bb9a-4c623f742063" containerID="c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f" exitCode=0 Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.074771 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" event={"ID":"84915478-8abb-40dd-bb9a-4c623f742063","Type":"ContainerDied","Data":"c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f"} Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.084228 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-8476d947c6-lsgxz" podStartSLOduration=3.084211421 podStartE2EDuration="3.084211421s" podCreationTimestamp="2025-10-07 19:35:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:12.073149377 +0000 UTC m=+1038.151404998" watchObservedRunningTime="2025-10-07 19:35:12.084211421 +0000 UTC m=+1038.162467032" Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.094342 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8e5ad266-d270-4685-81e4-85c81a1853fb","Type":"ContainerStarted","Data":"297bbe6065a680be07f4512a4e6250711142778fb47889e205be640811d52334"} Oct 07 19:35:12 crc kubenswrapper[4813]: I1007 19:35:12.094378 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"8e5ad266-d270-4685-81e4-85c81a1853fb","Type":"ContainerStarted","Data":"6ca08a0f4cdd549e098639288f800d99a8fd9f1158ce09c799e57104b35fd816"} Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.115760 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8e5ad266-d270-4685-81e4-85c81a1853fb","Type":"ContainerStarted","Data":"5e911e5b0d8acb81490e42113315e655e725676e24019c286d83bbf88af49d75"} Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.116742 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.143010 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.142989403 podStartE2EDuration="4.142989403s" podCreationTimestamp="2025-10-07 19:35:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:13.140549096 +0000 UTC m=+1039.218804707" watchObservedRunningTime="2025-10-07 19:35:13.142989403 +0000 UTC m=+1039.221245014" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.379980 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5496dd8845-nwmf5"] Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.381741 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.385464 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.385742 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.404854 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5496dd8845-nwmf5"] Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.440816 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-httpd-config\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.440898 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-combined-ca-bundle\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.440949 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jg97\" (UniqueName: \"kubernetes.io/projected/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-kube-api-access-5jg97\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.440976 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-ovndb-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: 
\"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.441021 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-config\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.441063 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-public-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.441084 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-internal-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.543657 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-combined-ca-bundle\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.543996 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jg97\" (UniqueName: \"kubernetes.io/projected/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-kube-api-access-5jg97\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.544022 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-ovndb-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.544063 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-config\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.544102 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-public-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.544125 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-internal-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " 
pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.544161 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-httpd-config\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.566410 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-ovndb-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.570966 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-internal-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.581371 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-public-tls-certs\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.583121 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-combined-ca-bundle\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.583472 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-httpd-config\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.614186 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jg97\" (UniqueName: \"kubernetes.io/projected/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-kube-api-access-5jg97\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.615556 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/013c7ce7-ad1b-4f61-920b-f5c5f685dcd7-config\") pod \"neutron-5496dd8845-nwmf5\" (UID: \"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7\") " pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:13 crc kubenswrapper[4813]: I1007 19:35:13.719785 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:14 crc kubenswrapper[4813]: I1007 19:35:14.831952 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7479685c4f-mx7hc" Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.786645 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-58vdp" Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.911647 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-scripts\") pod \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.911697 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-config-data\") pod \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.911820 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-combined-ca-bundle\") pod \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.911922 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7kq6\" (UniqueName: \"kubernetes.io/projected/8f039eca-b53a-446b-b219-2b6f2d56a0b4-kube-api-access-x7kq6\") pod \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.912494 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f039eca-b53a-446b-b219-2b6f2d56a0b4-logs\") pod \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\" (UID: \"8f039eca-b53a-446b-b219-2b6f2d56a0b4\") " Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.913094 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f039eca-b53a-446b-b219-2b6f2d56a0b4-logs" (OuterVolumeSpecName: "logs") pod "8f039eca-b53a-446b-b219-2b6f2d56a0b4" (UID: "8f039eca-b53a-446b-b219-2b6f2d56a0b4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.918162 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f039eca-b53a-446b-b219-2b6f2d56a0b4-kube-api-access-x7kq6" (OuterVolumeSpecName: "kube-api-access-x7kq6") pod "8f039eca-b53a-446b-b219-2b6f2d56a0b4" (UID: "8f039eca-b53a-446b-b219-2b6f2d56a0b4"). InnerVolumeSpecName "kube-api-access-x7kq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.934626 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-scripts" (OuterVolumeSpecName: "scripts") pod "8f039eca-b53a-446b-b219-2b6f2d56a0b4" (UID: "8f039eca-b53a-446b-b219-2b6f2d56a0b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:15 crc kubenswrapper[4813]: I1007 19:35:15.973296 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-config-data" (OuterVolumeSpecName: "config-data") pod "8f039eca-b53a-446b-b219-2b6f2d56a0b4" (UID: "8f039eca-b53a-446b-b219-2b6f2d56a0b4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.017383 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7kq6\" (UniqueName: \"kubernetes.io/projected/8f039eca-b53a-446b-b219-2b6f2d56a0b4-kube-api-access-x7kq6\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.017415 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f039eca-b53a-446b-b219-2b6f2d56a0b4-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.017425 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.017434 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.023425 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f039eca-b53a-446b-b219-2b6f2d56a0b4" (UID: "8f039eca-b53a-446b-b219-2b6f2d56a0b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.119559 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f039eca-b53a-446b-b219-2b6f2d56a0b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.149696 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-58vdp" event={"ID":"8f039eca-b53a-446b-b219-2b6f2d56a0b4","Type":"ContainerDied","Data":"50b1f8838f79de01af0298fe712f05921dd89c6f8e461bad6affe10c2fe97704"} Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.149733 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50b1f8838f79de01af0298fe712f05921dd89c6f8e461bad6affe10c2fe97704" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.149816 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-58vdp" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.912581 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-bd4864b74-5mp8m"] Oct 07 19:35:16 crc kubenswrapper[4813]: E1007 19:35:16.913160 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f039eca-b53a-446b-b219-2b6f2d56a0b4" containerName="placement-db-sync" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.913172 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f039eca-b53a-446b-b219-2b6f2d56a0b4" containerName="placement-db-sync" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.913351 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f039eca-b53a-446b-b219-2b6f2d56a0b4" containerName="placement-db-sync" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.914201 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.921220 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.921300 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.923927 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-wx9pw" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.924095 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.924316 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 07 19:35:16 crc kubenswrapper[4813]: I1007 19:35:16.951080 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-bd4864b74-5mp8m"] Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.049015 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-combined-ca-bundle\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.049091 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83739b1f-81fa-4e83-baea-f75bae3f1ea5-logs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.049174 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-internal-tls-certs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.049230 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-scripts\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.049281 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-public-tls-certs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.049347 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-config-data\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.049390 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqfzh\" (UniqueName: \"kubernetes.io/projected/83739b1f-81fa-4e83-baea-f75bae3f1ea5-kube-api-access-rqfzh\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.150927 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-config-data\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.151000 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqfzh\" (UniqueName: \"kubernetes.io/projected/83739b1f-81fa-4e83-baea-f75bae3f1ea5-kube-api-access-rqfzh\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.151061 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-combined-ca-bundle\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.151084 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83739b1f-81fa-4e83-baea-f75bae3f1ea5-logs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.151155 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-internal-tls-certs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.151206 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-scripts\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.151233 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-public-tls-certs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.157598 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83739b1f-81fa-4e83-baea-f75bae3f1ea5-logs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.163244 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-combined-ca-bundle\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.176474 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-internal-tls-certs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.181626 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-public-tls-certs\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.181886 4813 generic.go:334] "Generic (PLEG): container finished" podID="0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" containerID="6ffd161112faed47b3718113f640981370ac7eb1e1280fe19d1a46cadf21215e" exitCode=0 Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.181930 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zz7dp" event={"ID":"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4","Type":"ContainerDied","Data":"6ffd161112faed47b3718113f640981370ac7eb1e1280fe19d1a46cadf21215e"} Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.183009 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-config-data\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.184944 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqfzh\" (UniqueName: \"kubernetes.io/projected/83739b1f-81fa-4e83-baea-f75bae3f1ea5-kube-api-access-rqfzh\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.197472 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83739b1f-81fa-4e83-baea-f75bae3f1ea5-scripts\") pod \"placement-bd4864b74-5mp8m\" (UID: \"83739b1f-81fa-4e83-baea-f75bae3f1ea5\") " pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:17 crc kubenswrapper[4813]: I1007 19:35:17.246258 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:19 crc kubenswrapper[4813]: I1007 19:35:19.267145 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 19:35:19 crc kubenswrapper[4813]: I1007 19:35:19.268798 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 19:35:19 crc kubenswrapper[4813]: I1007 19:35:19.319583 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 19:35:19 crc kubenswrapper[4813]: I1007 19:35:19.913606 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:19 crc kubenswrapper[4813]: I1007 19:35:19.913651 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:19 crc kubenswrapper[4813]: I1007 19:35:19.994461 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.054608 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.108958 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.125045 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-687ddb5b-lwwn2" podUID="a0b0d403-9a0c-407b-a3d4-a0db3e612092" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.136523 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.204144 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-fernet-keys\") pod \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.204191 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmnmh\" (UniqueName: \"kubernetes.io/projected/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-kube-api-access-dmnmh\") pod \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.204248 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-combined-ca-bundle\") pod \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.204294 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-credential-keys\") pod \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.204354 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-scripts\") pod \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.204432 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-config-data\") pod \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\" (UID: \"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4\") " Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.238567 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-kube-api-access-dmnmh" (OuterVolumeSpecName: "kube-api-access-dmnmh") pod "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" (UID: "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4"). InnerVolumeSpecName "kube-api-access-dmnmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.241665 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-scripts" (OuterVolumeSpecName: "scripts") pod "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" (UID: "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.245493 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" (UID: "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.261197 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" (UID: "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.269228 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zz7dp" event={"ID":"0afdc9e8-75a0-4ee8-971b-f2390ea1eff4","Type":"ContainerDied","Data":"a8aad640df12272fe4cbf3bfa7da59891a859e8daadae5ef11311a3f06999129"} Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.269294 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8aad640df12272fe4cbf3bfa7da59891a859e8daadae5ef11311a3f06999129" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.308460 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zz7dp" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.320824 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.320869 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.327115 4813 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.332098 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmnmh\" (UniqueName: \"kubernetes.io/projected/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-kube-api-access-dmnmh\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.332126 4813 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.332136 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.409155 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-config-data" (OuterVolumeSpecName: "config-data") pod "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" (UID: "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.468517 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.546805 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" (UID: "0afdc9e8-75a0-4ee8-971b-f2390ea1eff4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.582857 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:20 crc kubenswrapper[4813]: I1007 19:35:20.907542 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-bd4864b74-5mp8m"] Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.178089 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5496dd8845-nwmf5"] Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.281224 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-68ff4bb5b-nhpkd"] Oct 07 19:35:21 crc kubenswrapper[4813]: E1007 19:35:21.281609 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" containerName="keystone-bootstrap" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.281621 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" containerName="keystone-bootstrap" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.281820 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" containerName="keystone-bootstrap" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.284497 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.290963 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.291182 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-gbnk5" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.291355 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.291559 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.292199 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.292552 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.331009 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-fernet-keys\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.331045 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-credential-keys\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.331067 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-config-data\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.331082 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-public-tls-certs\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.331137 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-combined-ca-bundle\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.331172 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv5v5\" (UniqueName: \"kubernetes.io/projected/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-kube-api-access-fv5v5\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.331191 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-internal-tls-certs\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.331219 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-scripts\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.348509 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-68ff4bb5b-nhpkd"] Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.367658 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" event={"ID":"84915478-8abb-40dd-bb9a-4c623f742063","Type":"ContainerStarted","Data":"d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec"} Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.368687 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.394619 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" podStartSLOduration=12.3946027 podStartE2EDuration="12.3946027s" podCreationTimestamp="2025-10-07 19:35:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:21.391807934 +0000 UTC m=+1047.470063545" watchObservedRunningTime="2025-10-07 19:35:21.3946027 +0000 UTC m=+1047.472858311" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.398755 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5496dd8845-nwmf5" event={"ID":"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7","Type":"ContainerStarted","Data":"f723ce076f421d21514bbd4ed0eb2c506a5f0797880602cd1251edc4b2e1fc3f"} Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.411226 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-bd4864b74-5mp8m" event={"ID":"83739b1f-81fa-4e83-baea-f75bae3f1ea5","Type":"ContainerStarted","Data":"c6d701986648c73cd0602457fc3ba3e51ba656f3e85259f769d2ea1a4405d0a8"} Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.433135 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-combined-ca-bundle\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.433211 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv5v5\" (UniqueName: \"kubernetes.io/projected/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-kube-api-access-fv5v5\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.433239 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-internal-tls-certs\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.433287 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-scripts\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.433416 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-fernet-keys\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.433434 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-credential-keys\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.433484 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-config-data\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.433521 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-public-tls-certs\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.441895 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-internal-tls-certs\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.447886 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-public-tls-certs\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.448462 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-combined-ca-bundle\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.448735 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-scripts\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " 
pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.451031 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-fernet-keys\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.453641 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-credential-keys\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.459698 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-config-data\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.464274 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv5v5\" (UniqueName: \"kubernetes.io/projected/dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3-kube-api-access-fv5v5\") pod \"keystone-68ff4bb5b-nhpkd\" (UID: \"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3\") " pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:21 crc kubenswrapper[4813]: I1007 19:35:21.625276 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.243596 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-68ff4bb5b-nhpkd"] Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.424870 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-68ff4bb5b-nhpkd" event={"ID":"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3","Type":"ContainerStarted","Data":"9bd2d87674fab26c132e360f8250448d0640184781572c151b945d19bc940827"} Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.437539 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5496dd8845-nwmf5" event={"ID":"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7","Type":"ContainerStarted","Data":"c399769828cc14f6faef0e83af3931f73cd95a8982786ab9511b79e3770c9f25"} Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.439136 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-bd4864b74-5mp8m" event={"ID":"83739b1f-81fa-4e83-baea-f75bae3f1ea5","Type":"ContainerStarted","Data":"cdedbb8c07bdf94feeb45186c9cfb62fc1c669de53273b2dde437325abfaf93b"} Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.440255 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3625b48-fada-4ec5-a62b-4ec51555f5b3","Type":"ContainerStarted","Data":"6708601e7225ba5d88f6588cafa2fe938487716a40df5de4963abc278f9a6bd8"} Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.447350 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7b94g" event={"ID":"7aa4a319-f846-4044-a663-c75e35168316","Type":"ContainerStarted","Data":"42c2e9110de38426cafde0757204fc89c4f9e9f0719a2595b44eef8eabcbe6c8"} Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.447430 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" 
Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.447443 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 07 19:35:22 crc kubenswrapper[4813]: I1007 19:35:22.508461 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-7b94g" podStartSLOduration=4.013433891 podStartE2EDuration="51.508444802s" podCreationTimestamp="2025-10-07 19:34:31 +0000 UTC" firstStartedPulling="2025-10-07 19:34:32.872195398 +0000 UTC m=+998.950451009" lastFinishedPulling="2025-10-07 19:35:20.367206309 +0000 UTC m=+1046.445461920" observedRunningTime="2025-10-07 19:35:22.497659516 +0000 UTC m=+1048.575915127" watchObservedRunningTime="2025-10-07 19:35:22.508444802 +0000 UTC m=+1048.586700413"
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.474811 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-68ff4bb5b-nhpkd" event={"ID":"dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3","Type":"ContainerStarted","Data":"fd06c4d47a3cbf82227464e07377aa7aa6104c1ab8628958b81338fdc21ab0ce"}
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.476055 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-68ff4bb5b-nhpkd"
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.479133 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5496dd8845-nwmf5" event={"ID":"013c7ce7-ad1b-4f61-920b-f5c5f685dcd7","Type":"ContainerStarted","Data":"e7e099d78d3f12c6f7a626aa9e62f9e126e82cabd74699b7a0374d03de850592"}
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.479365 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5496dd8845-nwmf5"
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.491243 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-bd4864b74-5mp8m" event={"ID":"83739b1f-81fa-4e83-baea-f75bae3f1ea5","Type":"ContainerStarted","Data":"927a84ac10d3867132fcc441eccb11a33ec5b68d923eb2f5d0b02cbe281379b6"}
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.491409 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-bd4864b74-5mp8m"
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.491425 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-bd4864b74-5mp8m"
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.495372 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-95b57" event={"ID":"37f756c5-2123-4e5b-9c02-f33dd061d767","Type":"ContainerStarted","Data":"143c3cee153b0f5795746ee4a66869d82db3cc6a0f7cb52fe7e2148858195334"}
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.500123 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-68ff4bb5b-nhpkd" podStartSLOduration=2.500102203 podStartE2EDuration="2.500102203s" podCreationTimestamp="2025-10-07 19:35:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:23.494252492 +0000 UTC m=+1049.572508103" watchObservedRunningTime="2025-10-07 19:35:23.500102203 +0000 UTC m=+1049.578357814"
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.524540 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-bd4864b74-5mp8m" podStartSLOduration=7.524521682 podStartE2EDuration="7.524521682s" podCreationTimestamp="2025-10-07 19:35:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:23.5189726 +0000 UTC m=+1049.597228211" watchObservedRunningTime="2025-10-07 19:35:23.524521682 +0000 UTC m=+1049.602777293"
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.581518 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-95b57" podStartSLOduration=4.748586715 podStartE2EDuration="52.581500035s" podCreationTimestamp="2025-10-07 19:34:31 +0000 UTC" firstStartedPulling="2025-10-07 19:34:32.755020394 +0000 UTC m=+998.833276005" lastFinishedPulling="2025-10-07 19:35:20.587933714 +0000 UTC m=+1046.666189325" observedRunningTime="2025-10-07 19:35:23.577602368 +0000 UTC m=+1049.655857979" watchObservedRunningTime="2025-10-07 19:35:23.581500035 +0000 UTC m=+1049.659755646"
Oct 07 19:35:23 crc kubenswrapper[4813]: I1007 19:35:23.583949 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5496dd8845-nwmf5" podStartSLOduration=10.583943182 podStartE2EDuration="10.583943182s" podCreationTimestamp="2025-10-07 19:35:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:23.558627708 +0000 UTC m=+1049.636883319" watchObservedRunningTime="2025-10-07 19:35:23.583943182 +0000 UTC m=+1049.662198783"
Oct 07 19:35:24 crc kubenswrapper[4813]: I1007 19:35:24.942513 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 07 19:35:24 crc kubenswrapper[4813]: I1007 19:35:24.942935 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 07 19:35:24 crc kubenswrapper[4813]: I1007 19:35:24.951515 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 07 19:35:29 crc kubenswrapper[4813]: I1007 19:35:29.664520 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm"
Oct 07 19:35:29 crc kubenswrapper[4813]: I1007 19:35:29.730381 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-qlbgn"]
Oct 07 19:35:29 crc kubenswrapper[4813]: I1007 19:35:29.730615 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerName="dnsmasq-dns" containerID="cri-o://6421289613ac9e634579a0cb4f21c786241acc6bf2daa80ad64e96f5fbd39467" gracePeriod=10
Oct 07 19:35:29 crc kubenswrapper[4813]: I1007 19:35:29.986847 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Oct 07 19:35:30 crc kubenswrapper[4813]: I1007 19:35:30.133494 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-687ddb5b-lwwn2" podUID="a0b0d403-9a0c-407b-a3d4-a0db3e612092" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused"
Oct 07 19:35:30 crc kubenswrapper[4813]: I1007 19:35:30.581478 4813 generic.go:334] "Generic (PLEG): container finished" podID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerID="6421289613ac9e634579a0cb4f21c786241acc6bf2daa80ad64e96f5fbd39467" exitCode=0
Oct 07 19:35:30 crc kubenswrapper[4813]: I1007 19:35:30.581529 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" event={"ID":"6eacad55-fc64-4e75-b743-b106ce2d7c0d","Type":"ContainerDied","Data":"6421289613ac9e634579a0cb4f21c786241acc6bf2daa80ad64e96f5fbd39467"}
Oct 07 19:35:31 crc kubenswrapper[4813]: I1007 19:35:31.341886 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.143:5353: connect: connection refused"
Oct 07 19:35:32 crc kubenswrapper[4813]: I1007 19:35:32.619367 4813 generic.go:334] "Generic (PLEG): container finished" podID="7aa4a319-f846-4044-a663-c75e35168316" containerID="42c2e9110de38426cafde0757204fc89c4f9e9f0719a2595b44eef8eabcbe6c8" exitCode=0
Oct 07 19:35:32 crc kubenswrapper[4813]: I1007 19:35:32.619431 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7b94g" event={"ID":"7aa4a319-f846-4044-a663-c75e35168316","Type":"ContainerDied","Data":"42c2e9110de38426cafde0757204fc89c4f9e9f0719a2595b44eef8eabcbe6c8"}
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.602874 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7b94g"
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.640475 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd5z9\" (UniqueName: \"kubernetes.io/projected/7aa4a319-f846-4044-a663-c75e35168316-kube-api-access-jd5z9\") pod \"7aa4a319-f846-4044-a663-c75e35168316\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") "
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.652736 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7b94g" event={"ID":"7aa4a319-f846-4044-a663-c75e35168316","Type":"ContainerDied","Data":"aa7d6d2815fefd90b8b4295a62fd75d0653a846acc20b757169fabe26b39ab85"}
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.652773 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa7d6d2815fefd90b8b4295a62fd75d0653a846acc20b757169fabe26b39ab85"
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.652822 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7b94g"
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.664520 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aa4a319-f846-4044-a663-c75e35168316-kube-api-access-jd5z9" (OuterVolumeSpecName: "kube-api-access-jd5z9") pod "7aa4a319-f846-4044-a663-c75e35168316" (UID: "7aa4a319-f846-4044-a663-c75e35168316"). InnerVolumeSpecName "kube-api-access-jd5z9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.742182 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-db-sync-config-data\") pod \"7aa4a319-f846-4044-a663-c75e35168316\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") "
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.742242 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-combined-ca-bundle\") pod \"7aa4a319-f846-4044-a663-c75e35168316\" (UID: \"7aa4a319-f846-4044-a663-c75e35168316\") "
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.742819 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd5z9\" (UniqueName: \"kubernetes.io/projected/7aa4a319-f846-4044-a663-c75e35168316-kube-api-access-jd5z9\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.784506 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7aa4a319-f846-4044-a663-c75e35168316" (UID: "7aa4a319-f846-4044-a663-c75e35168316"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.808241 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7aa4a319-f846-4044-a663-c75e35168316" (UID: "7aa4a319-f846-4044-a663-c75e35168316"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.844368 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.844400 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa4a319-f846-4044-a663-c75e35168316-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.975532 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-d696dd678-l487w"]
Oct 07 19:35:34 crc kubenswrapper[4813]: E1007 19:35:34.979510 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aa4a319-f846-4044-a663-c75e35168316" containerName="barbican-db-sync"
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.979530 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aa4a319-f846-4044-a663-c75e35168316" containerName="barbican-db-sync"
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.981365 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aa4a319-f846-4044-a663-c75e35168316" containerName="barbican-db-sync"
Oct 07 19:35:34 crc kubenswrapper[4813]: I1007 19:35:34.985234 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.002239 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.002411 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.002547 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nz29z"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.023239 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-664466bb6c-ldqlb"]
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.024863 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.041893 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.042081 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d696dd678-l487w"]
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.059674 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-664466bb6c-ldqlb"]
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.159283 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lgzz\" (UniqueName: \"kubernetes.io/projected/584974f4-f44d-4f67-b675-9b0fb29be7f3-kube-api-access-9lgzz\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.159688 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-config-data\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.159769 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-combined-ca-bundle\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.159847 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-combined-ca-bundle\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.159940 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-config-data\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.160012 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-config-data-custom\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.160121 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97r5v\" (UniqueName: \"kubernetes.io/projected/dc06daa5-4a82-4b6c-bc77-2d40de999f15-kube-api-access-97r5v\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.160206 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc06daa5-4a82-4b6c-bc77-2d40de999f15-logs\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.160289 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/584974f4-f44d-4f67-b675-9b0fb29be7f3-logs\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.160394 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-config-data-custom\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.173103 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-xrngt"]
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.174675 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.215382 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-xrngt"]
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262526 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97r5v\" (UniqueName: \"kubernetes.io/projected/dc06daa5-4a82-4b6c-bc77-2d40de999f15-kube-api-access-97r5v\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262569 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc06daa5-4a82-4b6c-bc77-2d40de999f15-logs\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262595 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/584974f4-f44d-4f67-b675-9b0fb29be7f3-logs\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262639 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-config-data-custom\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262692 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lgzz\" (UniqueName: \"kubernetes.io/projected/584974f4-f44d-4f67-b675-9b0fb29be7f3-kube-api-access-9lgzz\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262709 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-config-data\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262733 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-combined-ca-bundle\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262755 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-combined-ca-bundle\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262792 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-config-data\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.262818 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-config-data-custom\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.276001 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc06daa5-4a82-4b6c-bc77-2d40de999f15-logs\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.279857 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/584974f4-f44d-4f67-b675-9b0fb29be7f3-logs\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.284593 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-combined-ca-bundle\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.297987 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-config-data-custom\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.299696 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-config-data\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.316881 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-combined-ca-bundle\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.318974 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97r5v\" (UniqueName: \"kubernetes.io/projected/dc06daa5-4a82-4b6c-bc77-2d40de999f15-kube-api-access-97r5v\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.318975 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/584974f4-f44d-4f67-b675-9b0fb29be7f3-config-data-custom\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.320500 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lgzz\" (UniqueName: \"kubernetes.io/projected/584974f4-f44d-4f67-b675-9b0fb29be7f3-kube-api-access-9lgzz\") pod \"barbican-keystone-listener-d696dd678-l487w\" (UID: \"584974f4-f44d-4f67-b675-9b0fb29be7f3\") " pod="openstack/barbican-keystone-listener-d696dd678-l487w"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.320573 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc06daa5-4a82-4b6c-bc77-2d40de999f15-config-data\") pod \"barbican-worker-664466bb6c-ldqlb\" (UID: \"dc06daa5-4a82-4b6c-bc77-2d40de999f15\") " pod="openstack/barbican-worker-664466bb6c-ldqlb"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.366434 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czccn\" (UniqueName: \"kubernetes.io/projected/7ccb8a83-9c52-4caf-8371-596b60188018-kube-api-access-czccn\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.366523 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.366601 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.366651 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.366672 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.366690 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-config\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.448361 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-f487749db-z8h9h"]
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.449799 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-f487749db-z8h9h"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.473822 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.473898 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.473945 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.473965 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.473985 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-config\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.474012 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czccn\" (UniqueName: \"kubernetes.io/projected/7ccb8a83-9c52-4caf-8371-596b60188018-kube-api-access-czccn\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.474986 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.474999 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:35 crc
kubenswrapper[4813]: I1007 19:35:35.475585 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.475592 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.475749 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.476081 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-config\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.482422 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-f487749db-z8h9h"] Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.510008 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czccn\" (UniqueName: \"kubernetes.io/projected/7ccb8a83-9c52-4caf-8371-596b60188018-kube-api-access-czccn\") pod \"dnsmasq-dns-75c8ddd69c-xrngt\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.578243 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8705c88f-a04a-4861-9e64-05bf5e90237f-logs\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.578277 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data-custom\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.578305 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ttpr\" (UniqueName: \"kubernetes.io/projected/8705c88f-a04a-4861-9e64-05bf5e90237f-kube-api-access-2ttpr\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.578388 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-combined-ca-bundle\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.578424 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.646627 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d696dd678-l487w" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.660662 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.679707 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ttpr\" (UniqueName: \"kubernetes.io/projected/8705c88f-a04a-4861-9e64-05bf5e90237f-kube-api-access-2ttpr\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.679801 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-combined-ca-bundle\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.679841 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.679906 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8705c88f-a04a-4861-9e64-05bf5e90237f-logs\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.679925 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data-custom\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.681521 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8705c88f-a04a-4861-9e64-05bf5e90237f-logs\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.693788 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-combined-ca-bundle\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.699827 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.700718 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ttpr\" (UniqueName: \"kubernetes.io/projected/8705c88f-a04a-4861-9e64-05bf5e90237f-kube-api-access-2ttpr\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.709813 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data-custom\") pod \"barbican-api-f487749db-z8h9h\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.776588 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-664466bb6c-ldqlb" Oct 07 19:35:35 crc kubenswrapper[4813]: I1007 19:35:35.994721 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.701030 4813 generic.go:334] "Generic (PLEG): container finished" podID="1b704b49-2eba-44ec-8c8f-88801848930a" containerID="1fd31cc3a878b62cedf94beb485599f7d6f255c209dc6e41ec1fd46240ffb75f" exitCode=137 Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.701296 4813 generic.go:334] "Generic (PLEG): container finished" podID="1b704b49-2eba-44ec-8c8f-88801848930a" containerID="c1dd9a2f324a461e87a23bab3288c44e16ae200f675369bb689f84d6202a5396" exitCode=137 Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.701098 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7479685c4f-mx7hc" event={"ID":"1b704b49-2eba-44ec-8c8f-88801848930a","Type":"ContainerDied","Data":"1fd31cc3a878b62cedf94beb485599f7d6f255c209dc6e41ec1fd46240ffb75f"} Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.701415 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7479685c4f-mx7hc" event={"ID":"1b704b49-2eba-44ec-8c8f-88801848930a","Type":"ContainerDied","Data":"c1dd9a2f324a461e87a23bab3288c44e16ae200f675369bb689f84d6202a5396"} Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.706364 4813 generic.go:334] "Generic (PLEG): container finished" podID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerID="4900c9a54407705d22dcf343b7be8daf2f39acae814023879db4bba74a790488" exitCode=137 Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.706394 4813 generic.go:334] "Generic (PLEG): container finished" podID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerID="12fc7bc06ed7a26c426b2adf255acd4b79b15add7cbfae069e8a98db9772f414" exitCode=137 Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.706435 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79f8458665-fljfq" event={"ID":"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec","Type":"ContainerDied","Data":"4900c9a54407705d22dcf343b7be8daf2f39acae814023879db4bba74a790488"} Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.706474 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79f8458665-fljfq" 
event={"ID":"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec","Type":"ContainerDied","Data":"12fc7bc06ed7a26c426b2adf255acd4b79b15add7cbfae069e8a98db9772f414"} Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.711342 4813 generic.go:334] "Generic (PLEG): container finished" podID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerID="8d7dede48a0cd2a353036958ce41f08b5faa650bbad1e0ae80c16264077867bb" exitCode=137 Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.711370 4813 generic.go:334] "Generic (PLEG): container finished" podID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerID="ea25dd4c1cb8562e2993faea0512df3e7ab3b6c717dd15f1057dec7eb5d47285" exitCode=137 Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.711391 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f99dc5487-btwgz" event={"ID":"61eec647-dfb7-4ff8-b143-3823f9aae7bb","Type":"ContainerDied","Data":"8d7dede48a0cd2a353036958ce41f08b5faa650bbad1e0ae80c16264077867bb"} Oct 07 19:35:37 crc kubenswrapper[4813]: I1007 19:35:37.711436 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f99dc5487-btwgz" event={"ID":"61eec647-dfb7-4ff8-b143-3823f9aae7bb","Type":"ContainerDied","Data":"ea25dd4c1cb8562e2993faea0512df3e7ab3b6c717dd15f1057dec7eb5d47285"} Oct 07 19:35:37 crc kubenswrapper[4813]: E1007 19:35:37.938296 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Oct 07 19:35:37 crc kubenswrapper[4813]: E1007 19:35:37.939350 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jqgxp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(d3625b48-fada-4ec5-a62b-4ec51555f5b3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 07 19:35:37 crc kubenswrapper[4813]: E1007 19:35:37.940616 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.146213 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.231984 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-config\") pod \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.232049 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-svc\") pod \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.232080 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-swift-storage-0\") pod \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.232099 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbhkb\" (UniqueName: \"kubernetes.io/projected/6eacad55-fc64-4e75-b743-b106ce2d7c0d-kube-api-access-lbhkb\") pod \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.232205 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-nb\") pod \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 
19:35:38.232241 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-sb\") pod \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\" (UID: \"6eacad55-fc64-4e75-b743-b106ce2d7c0d\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.246511 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eacad55-fc64-4e75-b743-b106ce2d7c0d-kube-api-access-lbhkb" (OuterVolumeSpecName: "kube-api-access-lbhkb") pod "6eacad55-fc64-4e75-b743-b106ce2d7c0d" (UID: "6eacad55-fc64-4e75-b743-b106ce2d7c0d"). InnerVolumeSpecName "kube-api-access-lbhkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.343030 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbhkb\" (UniqueName: \"kubernetes.io/projected/6eacad55-fc64-4e75-b743-b106ce2d7c0d-kube-api-access-lbhkb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.361650 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7f688869c6-w96p7"] Oct 07 19:35:38 crc kubenswrapper[4813]: E1007 19:35:38.362507 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerName="dnsmasq-dns" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.362526 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerName="dnsmasq-dns" Oct 07 19:35:38 crc kubenswrapper[4813]: E1007 19:35:38.362552 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerName="init" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.362560 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerName="init" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.362828 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerName="dnsmasq-dns" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.364040 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.376141 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.376315 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.376607 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6eacad55-fc64-4e75-b743-b106ce2d7c0d" (UID: "6eacad55-fc64-4e75-b743-b106ce2d7c0d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.412910 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-config" (OuterVolumeSpecName: "config") pod "6eacad55-fc64-4e75-b743-b106ce2d7c0d" (UID: "6eacad55-fc64-4e75-b743-b106ce2d7c0d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.426292 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7f688869c6-w96p7"] Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.441776 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6eacad55-fc64-4e75-b743-b106ce2d7c0d" (UID: "6eacad55-fc64-4e75-b743-b106ce2d7c0d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.444224 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46mg5\" (UniqueName: \"kubernetes.io/projected/f241042f-7389-4b62-b934-ac5ac321fcbc-kube-api-access-46mg5\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.445386 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f241042f-7389-4b62-b934-ac5ac321fcbc-logs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.445514 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-internal-tls-certs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.445627 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-config-data\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.445734 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-public-tls-certs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.445867 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-config-data-custom\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.445976 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-combined-ca-bundle\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc 
kubenswrapper[4813]: I1007 19:35:38.446094 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.446196 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.446271 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.465060 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6eacad55-fc64-4e75-b743-b106ce2d7c0d" (UID: "6eacad55-fc64-4e75-b743-b106ce2d7c0d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.504481 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6eacad55-fc64-4e75-b743-b106ce2d7c0d" (UID: "6eacad55-fc64-4e75-b743-b106ce2d7c0d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556306 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-config-data\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556378 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-public-tls-certs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556443 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-config-data-custom\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556460 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-combined-ca-bundle\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556518 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46mg5\" (UniqueName: \"kubernetes.io/projected/f241042f-7389-4b62-b934-ac5ac321fcbc-kube-api-access-46mg5\") pod \"barbican-api-7f688869c6-w96p7\" (UID: 
\"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556557 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f241042f-7389-4b62-b934-ac5ac321fcbc-logs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556580 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-internal-tls-certs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556637 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.556648 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6eacad55-fc64-4e75-b743-b106ce2d7c0d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.557743 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f241042f-7389-4b62-b934-ac5ac321fcbc-logs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.580487 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-combined-ca-bundle\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.580889 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-internal-tls-certs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.581399 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-public-tls-certs\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.583388 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-config-data-custom\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.583840 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46mg5\" (UniqueName: \"kubernetes.io/projected/f241042f-7389-4b62-b934-ac5ac321fcbc-kube-api-access-46mg5\") 
pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.586636 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f241042f-7389-4b62-b934-ac5ac321fcbc-config-data\") pod \"barbican-api-7f688869c6-w96p7\" (UID: \"f241042f-7389-4b62-b934-ac5ac321fcbc\") " pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.652465 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.659356 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.701233 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.732685 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" event={"ID":"6eacad55-fc64-4e75-b743-b106ce2d7c0d","Type":"ContainerDied","Data":"931bec6ada43b9372b5db5fad39551e11fe8e13191d0f37f5cdc78b3ea22e3a9"} Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.733139 4813 scope.go:117] "RemoveContainer" containerID="6421289613ac9e634579a0cb4f21c786241acc6bf2daa80ad64e96f5fbd39467" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.733270 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.742497 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-f99dc5487-btwgz" event={"ID":"61eec647-dfb7-4ff8-b143-3823f9aae7bb","Type":"ContainerDied","Data":"3d230b1ce266e2a3c2a3ab6e9e7cd734d292d931c760eb406e39c40b643a54e4"} Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.742800 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-f99dc5487-btwgz" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.744568 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7479685c4f-mx7hc" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.761075 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79f8458665-fljfq" event={"ID":"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec","Type":"ContainerDied","Data":"cbf3bd116af2b44b70aabdd1758bec2884ffed477f8d0d3e50d85e13c74caa49"} Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.761178 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-79f8458665-fljfq" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.771495 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltknd\" (UniqueName: \"kubernetes.io/projected/61eec647-dfb7-4ff8-b143-3823f9aae7bb-kube-api-access-ltknd\") pod \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.772584 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61eec647-dfb7-4ff8-b143-3823f9aae7bb-logs\") pod \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.773204 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-config-data\") pod \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.773340 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-scripts\") pod \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.773387 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-config-data\") pod \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.773469 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61eec647-dfb7-4ff8-b143-3823f9aae7bb-horizon-secret-key\") pod \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.773523 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-logs\") pod \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.773553 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-horizon-secret-key\") pod \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.773609 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-scripts\") pod \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.773647 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v6dr\" (UniqueName: \"kubernetes.io/projected/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-kube-api-access-5v6dr\") pod \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\" (UID: 
\"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") " Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.778308 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61eec647-dfb7-4ff8-b143-3823f9aae7bb-kube-api-access-ltknd" (OuterVolumeSpecName: "kube-api-access-ltknd") pod "61eec647-dfb7-4ff8-b143-3823f9aae7bb" (UID: "61eec647-dfb7-4ff8-b143-3823f9aae7bb"). InnerVolumeSpecName "kube-api-access-ltknd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.783821 4813 generic.go:334] "Generic (PLEG): container finished" podID="37f756c5-2123-4e5b-9c02-f33dd061d767" containerID="143c3cee153b0f5795746ee4a66869d82db3cc6a0f7cb52fe7e2148858195334" exitCode=0 Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.783956 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerName="ceilometer-notification-agent" containerID="cri-o://cf70a979e09f162083cfb6e88bb6f4202b83ebcf183a77ac4b76d3e31bfa5400" gracePeriod=30 Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.784026 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-95b57" event={"ID":"37f756c5-2123-4e5b-9c02-f33dd061d767","Type":"ContainerDied","Data":"143c3cee153b0f5795746ee4a66869d82db3cc6a0f7cb52fe7e2148858195334"} Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.784135 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerName="sg-core" containerID="cri-o://6708601e7225ba5d88f6588cafa2fe938487716a40df5de4963abc278f9a6bd8" gracePeriod=30 Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.785447 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-logs" (OuterVolumeSpecName: "logs") pod "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" (UID: "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.785666 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61eec647-dfb7-4ff8-b143-3823f9aae7bb-logs" (OuterVolumeSpecName: "logs") pod "61eec647-dfb7-4ff8-b143-3823f9aae7bb" (UID: "61eec647-dfb7-4ff8-b143-3823f9aae7bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.786759 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-qlbgn"] Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.818224 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61eec647-dfb7-4ff8-b143-3823f9aae7bb-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "61eec647-dfb7-4ff8-b143-3823f9aae7bb" (UID: "61eec647-dfb7-4ff8-b143-3823f9aae7bb"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.818347 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-qlbgn"] Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.828278 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" (UID: "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.828857 4813 scope.go:117] "RemoveContainer" containerID="a834cedcd07ae4fe1a2a84cae98724a093bb751ca41de31b4d536f05055e8289" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.832043 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-kube-api-access-5v6dr" (OuterVolumeSpecName: "kube-api-access-5v6dr") pod "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" (UID: "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec"). InnerVolumeSpecName "kube-api-access-5v6dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.867899 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-config-data" (OuterVolumeSpecName: "config-data") pod "61eec647-dfb7-4ff8-b143-3823f9aae7bb" (UID: "61eec647-dfb7-4ff8-b143-3823f9aae7bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.878840 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-scripts" (OuterVolumeSpecName: "scripts") pod "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" (UID: "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.879175 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-scripts" (OuterVolumeSpecName: "scripts") pod "61eec647-dfb7-4ff8-b143-3823f9aae7bb" (UID: "61eec647-dfb7-4ff8-b143-3823f9aae7bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.879901 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b704b49-2eba-44ec-8c8f-88801848930a-logs" (OuterVolumeSpecName: "logs") pod "1b704b49-2eba-44ec-8c8f-88801848930a" (UID: "1b704b49-2eba-44ec-8c8f-88801848930a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.880360 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b704b49-2eba-44ec-8c8f-88801848930a-logs\") pod \"1b704b49-2eba-44ec-8c8f-88801848930a\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") "
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.880487 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-scripts\") pod \"1b704b49-2eba-44ec-8c8f-88801848930a\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") "
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.880513 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-scripts\") pod \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\" (UID: \"4a23ba8f-0c41-40f0-ab98-7ccd7b321aec\") "
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.881416 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrblm\" (UniqueName: \"kubernetes.io/projected/1b704b49-2eba-44ec-8c8f-88801848930a-kube-api-access-vrblm\") pod \"1b704b49-2eba-44ec-8c8f-88801848930a\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") "
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.881874 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1b704b49-2eba-44ec-8c8f-88801848930a-horizon-secret-key\") pod \"1b704b49-2eba-44ec-8c8f-88801848930a\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") "
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.881918 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-scripts\") pod \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\" (UID: \"61eec647-dfb7-4ff8-b143-3823f9aae7bb\") "
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.881947 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-config-data\") pod \"1b704b49-2eba-44ec-8c8f-88801848930a\" (UID: \"1b704b49-2eba-44ec-8c8f-88801848930a\") "
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.882675 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.882696 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b704b49-2eba-44ec-8c8f-88801848930a-logs\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.882723 4813 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61eec647-dfb7-4ff8-b143-3823f9aae7bb-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.882737 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-logs\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.882746 4813 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.882755 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v6dr\" (UniqueName: \"kubernetes.io/projected/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-kube-api-access-5v6dr\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.882764 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltknd\" (UniqueName: \"kubernetes.io/projected/61eec647-dfb7-4ff8-b143-3823f9aae7bb-kube-api-access-ltknd\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.882772 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61eec647-dfb7-4ff8-b143-3823f9aae7bb-logs\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: W1007 19:35:38.883596 4813 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec/volumes/kubernetes.io~configmap/scripts
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.883622 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-scripts" (OuterVolumeSpecName: "scripts") pod "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" (UID: "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:35:38 crc kubenswrapper[4813]: W1007 19:35:38.883686 4813 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/61eec647-dfb7-4ff8-b143-3823f9aae7bb/volumes/kubernetes.io~configmap/scripts
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.883694 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-scripts" (OuterVolumeSpecName: "scripts") pod "61eec647-dfb7-4ff8-b143-3823f9aae7bb" (UID: "61eec647-dfb7-4ff8-b143-3823f9aae7bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.913353 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b704b49-2eba-44ec-8c8f-88801848930a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "1b704b49-2eba-44ec-8c8f-88801848930a" (UID: "1b704b49-2eba-44ec-8c8f-88801848930a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.925194 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-config-data" (OuterVolumeSpecName: "config-data") pod "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" (UID: "4a23ba8f-0c41-40f0-ab98-7ccd7b321aec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.926378 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b704b49-2eba-44ec-8c8f-88801848930a-kube-api-access-vrblm" (OuterVolumeSpecName: "kube-api-access-vrblm") pod "1b704b49-2eba-44ec-8c8f-88801848930a" (UID: "1b704b49-2eba-44ec-8c8f-88801848930a"). InnerVolumeSpecName "kube-api-access-vrblm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.944459 4813 scope.go:117] "RemoveContainer" containerID="8d7dede48a0cd2a353036958ce41f08b5faa650bbad1e0ae80c16264077867bb"
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.947876 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-xrngt"]
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.965227 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-f487749db-z8h9h"]
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.984103 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61eec647-dfb7-4ff8-b143-3823f9aae7bb-scripts\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.984339 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-scripts\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.985117 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.985249 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrblm\" (UniqueName: \"kubernetes.io/projected/1b704b49-2eba-44ec-8c8f-88801848930a-kube-api-access-vrblm\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.985414 4813 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1b704b49-2eba-44ec-8c8f-88801848930a-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:38 crc kubenswrapper[4813]: I1007 19:35:38.986250 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-scripts" (OuterVolumeSpecName: "scripts") pod "1b704b49-2eba-44ec-8c8f-88801848930a" (UID: "1b704b49-2eba-44ec-8c8f-88801848930a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.057305 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-config-data" (OuterVolumeSpecName: "config-data") pod "1b704b49-2eba-44ec-8c8f-88801848930a" (UID: "1b704b49-2eba-44ec-8c8f-88801848930a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.090363 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.090393 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1b704b49-2eba-44ec-8c8f-88801848930a-scripts\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.217153 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-664466bb6c-ldqlb"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.243532 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d696dd678-l487w"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.258774 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-79f8458665-fljfq"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.266983 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-79f8458665-fljfq"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.280429 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-f99dc5487-btwgz"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.286259 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-f99dc5487-btwgz"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.399557 4813 scope.go:117] "RemoveContainer" containerID="ea25dd4c1cb8562e2993faea0512df3e7ab3b6c717dd15f1057dec7eb5d47285"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.445558 4813 scope.go:117] "RemoveContainer" containerID="4900c9a54407705d22dcf343b7be8daf2f39acae814023879db4bba74a790488"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.520474 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7f688869c6-w96p7"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.632131 4813 scope.go:117] "RemoveContainer" containerID="12fc7bc06ed7a26c426b2adf255acd4b79b15add7cbfae069e8a98db9772f414"
Oct 07 19:35:39 crc kubenswrapper[4813]: W1007 19:35:39.692626 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf241042f_7389_4b62_b934_ac5ac321fcbc.slice/crio-0de8758563f3ace0387065469bef6b3c48648ed85b7414f58221b695163d9f8e WatchSource:0}: Error finding container 0de8758563f3ace0387065469bef6b3c48648ed85b7414f58221b695163d9f8e: Status 404 returned error can't find the container with id 0de8758563f3ace0387065469bef6b3c48648ed85b7414f58221b695163d9f8e
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.705267 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-8476d947c6-lsgxz"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.804221 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7479685c4f-mx7hc" event={"ID":"1b704b49-2eba-44ec-8c8f-88801848930a","Type":"ContainerDied","Data":"1967fe3fcc85c1fbe3889c2bc8d42bb991085e5a981513d64ab3c61d88725d15"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.804262 4813 scope.go:117] "RemoveContainer" containerID="1fd31cc3a878b62cedf94beb485599f7d6f255c209dc6e41ec1fd46240ffb75f"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.804377 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7479685c4f-mx7hc"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.819239 4813 generic.go:334] "Generic (PLEG): container finished" podID="7ccb8a83-9c52-4caf-8371-596b60188018" containerID="cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6" exitCode=0
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.819552 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" event={"ID":"7ccb8a83-9c52-4caf-8371-596b60188018","Type":"ContainerDied","Data":"cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.819708 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" event={"ID":"7ccb8a83-9c52-4caf-8371-596b60188018","Type":"ContainerStarted","Data":"6a0e20f42effe523e60947120e21fbfab3a3ec18990db4887991ae295f48ee55"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.821247 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d696dd678-l487w" event={"ID":"584974f4-f44d-4f67-b675-9b0fb29be7f3","Type":"ContainerStarted","Data":"c14942608118d43fd9d428295a10a8acd26af40dcd444b2c02d67780cec32b12"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.823485 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-664466bb6c-ldqlb" event={"ID":"dc06daa5-4a82-4b6c-bc77-2d40de999f15","Type":"ContainerStarted","Data":"d707f9ae4de385df42c011166485b52e73bf4478a9f90bbc184c9ccaa188fe54"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.849453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f688869c6-w96p7" event={"ID":"f241042f-7389-4b62-b934-ac5ac321fcbc","Type":"ContainerStarted","Data":"0de8758563f3ace0387065469bef6b3c48648ed85b7414f58221b695163d9f8e"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.875384 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7479685c4f-mx7hc"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.880341 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7479685c4f-mx7hc"]
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.885812 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerStarted","Data":"8f638db116aaece97374c22011c37f62dad55024e7eee19077f7487a16681295"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.885852 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerStarted","Data":"98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.885865 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f487749db-z8h9h"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.885874 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerStarted","Data":"6b310ecc7fcccc8f97d11990a2e9c0b56f24952a220abb5123648589ce9bf2b7"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.885893 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f487749db-z8h9h"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.924813 4813 generic.go:334] "Generic (PLEG): container finished" podID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerID="6708601e7225ba5d88f6588cafa2fe938487716a40df5de4963abc278f9a6bd8" exitCode=2
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.925060 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3625b48-fada-4ec5-a62b-4ec51555f5b3","Type":"ContainerDied","Data":"6708601e7225ba5d88f6588cafa2fe938487716a40df5de4963abc278f9a6bd8"}
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.930073 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-f487749db-z8h9h" podStartSLOduration=4.9300521459999995 podStartE2EDuration="4.930052146s" podCreationTimestamp="2025-10-07 19:35:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:39.925566483 +0000 UTC m=+1066.003822094" watchObservedRunningTime="2025-10-07 19:35:39.930052146 +0000 UTC m=+1066.008307757"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.988210 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.988587 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-688984b46d-g79nd"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.989468 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"6ae7441f8930b87f906e801dc55dce71ed5b180dbc9b0bace9037674d73cac68"} pod="openstack/horizon-688984b46d-g79nd" containerMessage="Container horizon failed startup probe, will be restarted"
Oct 07 19:35:39 crc kubenswrapper[4813]: I1007 19:35:39.989500 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" containerID="cri-o://6ae7441f8930b87f906e801dc55dce71ed5b180dbc9b0bace9037674d73cac68" gracePeriod=30
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.077975 4813 scope.go:117] "RemoveContainer" containerID="c1dd9a2f324a461e87a23bab3288c44e16ae200f675369bb689f84d6202a5396"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.127130 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-687ddb5b-lwwn2" podUID="a0b0d403-9a0c-407b-a3d4-a0db3e612092" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.127209 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-687ddb5b-lwwn2"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.128143 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"09fc8d6b8f8db537f189bf0bc2613bb56d6dafb56ae7e6bce96d93213c922ce6"} pod="openstack/horizon-687ddb5b-lwwn2" containerMessage="Container horizon failed startup probe, will be restarted"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.128176 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-687ddb5b-lwwn2" podUID="a0b0d403-9a0c-407b-a3d4-a0db3e612092" containerName="horizon" containerID="cri-o://09fc8d6b8f8db537f189bf0bc2613bb56d6dafb56ae7e6bce96d93213c922ce6" gracePeriod=30
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.379482 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-95b57"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.442936 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-db-sync-config-data\") pod \"37f756c5-2123-4e5b-9c02-f33dd061d767\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") "
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.442978 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-combined-ca-bundle\") pod \"37f756c5-2123-4e5b-9c02-f33dd061d767\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") "
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.443024 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-scripts\") pod \"37f756c5-2123-4e5b-9c02-f33dd061d767\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") "
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.443051 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-config-data\") pod \"37f756c5-2123-4e5b-9c02-f33dd061d767\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") "
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.443089 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vfmq\" (UniqueName: \"kubernetes.io/projected/37f756c5-2123-4e5b-9c02-f33dd061d767-kube-api-access-2vfmq\") pod \"37f756c5-2123-4e5b-9c02-f33dd061d767\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") "
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.443127 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37f756c5-2123-4e5b-9c02-f33dd061d767-etc-machine-id\") pod \"37f756c5-2123-4e5b-9c02-f33dd061d767\" (UID: \"37f756c5-2123-4e5b-9c02-f33dd061d767\") "
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.443667 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/37f756c5-2123-4e5b-9c02-f33dd061d767-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "37f756c5-2123-4e5b-9c02-f33dd061d767" (UID: "37f756c5-2123-4e5b-9c02-f33dd061d767"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.451831 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37f756c5-2123-4e5b-9c02-f33dd061d767-kube-api-access-2vfmq" (OuterVolumeSpecName: "kube-api-access-2vfmq") pod "37f756c5-2123-4e5b-9c02-f33dd061d767" (UID: "37f756c5-2123-4e5b-9c02-f33dd061d767"). InnerVolumeSpecName "kube-api-access-2vfmq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.453898 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-scripts" (OuterVolumeSpecName: "scripts") pod "37f756c5-2123-4e5b-9c02-f33dd061d767" (UID: "37f756c5-2123-4e5b-9c02-f33dd061d767"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.463017 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "37f756c5-2123-4e5b-9c02-f33dd061d767" (UID: "37f756c5-2123-4e5b-9c02-f33dd061d767"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.505114 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-config-data" (OuterVolumeSpecName: "config-data") pod "37f756c5-2123-4e5b-9c02-f33dd061d767" (UID: "37f756c5-2123-4e5b-9c02-f33dd061d767"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.517488 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37f756c5-2123-4e5b-9c02-f33dd061d767" (UID: "37f756c5-2123-4e5b-9c02-f33dd061d767"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.544779 4813 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.544976 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.545035 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-scripts\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.545087 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37f756c5-2123-4e5b-9c02-f33dd061d767-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.545136 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vfmq\" (UniqueName: \"kubernetes.io/projected/37f756c5-2123-4e5b-9c02-f33dd061d767-kube-api-access-2vfmq\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.545190 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37f756c5-2123-4e5b-9c02-f33dd061d767-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.616696 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" path="/var/lib/kubelet/pods/1b704b49-2eba-44ec-8c8f-88801848930a/volumes"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.617373 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" path="/var/lib/kubelet/pods/4a23ba8f-0c41-40f0-ab98-7ccd7b321aec/volumes"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.617963 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" path="/var/lib/kubelet/pods/61eec647-dfb7-4ff8-b143-3823f9aae7bb/volumes"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.619122 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" path="/var/lib/kubelet/pods/6eacad55-fc64-4e75-b743-b106ce2d7c0d/volumes"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.937107 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" event={"ID":"7ccb8a83-9c52-4caf-8371-596b60188018","Type":"ContainerStarted","Data":"0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3"}
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.939974 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.943207 4813 generic.go:334] "Generic (PLEG): container finished" podID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerID="8f638db116aaece97374c22011c37f62dad55024e7eee19077f7487a16681295" exitCode=1
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.943360 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerDied","Data":"8f638db116aaece97374c22011c37f62dad55024e7eee19077f7487a16681295"}
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.943947 4813 scope.go:117] "RemoveContainer" containerID="8f638db116aaece97374c22011c37f62dad55024e7eee19077f7487a16681295"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.945902 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-95b57" event={"ID":"37f756c5-2123-4e5b-9c02-f33dd061d767","Type":"ContainerDied","Data":"5f1d947136e70799c8bb6061df12edda98af6466b22a107a9015e99ce6f1ea62"}
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.945920 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f1d947136e70799c8bb6061df12edda98af6466b22a107a9015e99ce6f1ea62"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.945960 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-95b57"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.948873 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f688869c6-w96p7" event={"ID":"f241042f-7389-4b62-b934-ac5ac321fcbc","Type":"ContainerStarted","Data":"2869c5bba420fbba1a9803799c6360264dbaee73d45bcff60a2359df12c65d44"}
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.948895 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f688869c6-w96p7" event={"ID":"f241042f-7389-4b62-b934-ac5ac321fcbc","Type":"ContainerStarted","Data":"775d05ab133261fea57bebb021454b3e57b248b084c64f7296e6921907c82a18"}
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.949498 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7f688869c6-w96p7"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.949522 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7f688869c6-w96p7"
Oct 07 19:35:40 crc kubenswrapper[4813]: I1007 19:35:40.985610 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" podStartSLOduration=5.985594259 podStartE2EDuration="5.985594259s" podCreationTimestamp="2025-10-07 19:35:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:40.965519488 +0000 UTC m=+1067.043775109" watchObservedRunningTime="2025-10-07 19:35:40.985594259 +0000 UTC m=+1067.063849870"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.015418 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7f688869c6-w96p7" podStartSLOduration=3.015401626 podStartE2EDuration="3.015401626s" podCreationTimestamp="2025-10-07 19:35:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:41.012854016 +0000 UTC m=+1067.091109627" watchObservedRunningTime="2025-10-07 19:35:41.015401626 +0000 UTC m=+1067.093657237"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.199431 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 07 19:35:41 crc kubenswrapper[4813]: E1007 19:35:41.199832 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.199843 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: E1007 19:35:41.199858 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.199864 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: E1007 19:35:41.199883 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.199888 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: E1007 19:35:41.199903 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.199909 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: E1007 19:35:41.199922 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.199927 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: E1007 19:35:41.199938 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37f756c5-2123-4e5b-9c02-f33dd061d767" containerName="cinder-db-sync"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.199944 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="37f756c5-2123-4e5b-9c02-f33dd061d767" containerName="cinder-db-sync"
Oct 07 19:35:41 crc kubenswrapper[4813]: E1007 19:35:41.199957 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.199963 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.200175 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="37f756c5-2123-4e5b-9c02-f33dd061d767" containerName="cinder-db-sync"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.200185 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.200200 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b704b49-2eba-44ec-8c8f-88801848930a" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.200207 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.200218 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.200229 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a23ba8f-0c41-40f0-ab98-7ccd7b321aec" containerName="horizon"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.200237 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="61eec647-dfb7-4ff8-b143-3823f9aae7bb" containerName="horizon-log"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.201182 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.206284 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.206423 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jr2vz"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.206449 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.206550 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.243555 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.283444 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c069e7ae-aa85-4df1-bb38-66c5b45c3341-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.283580 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-scripts\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.283617 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.283639 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq9kz\" (UniqueName: \"kubernetes.io/projected/c069e7ae-aa85-4df1-bb38-66c5b45c3341-kube-api-access-xq9kz\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.283663 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.283682 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.291761 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-xrngt"]
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.344452 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8b5c85b87-qlbgn" podUID="6eacad55-fc64-4e75-b743-b106ce2d7c0d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.143:5353: i/o timeout"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.344903 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-5qnsr"]
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.361217 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.386797 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c069e7ae-aa85-4df1-bb38-66c5b45c3341-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.399454 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.399745 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.399882 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-config\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.399999 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-svc\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.400108 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-scripts\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.400292 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.400428 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq9kz\" (UniqueName: \"kubernetes.io/projected/c069e7ae-aa85-4df1-bb38-66c5b45c3341-kube-api-access-xq9kz\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.400522 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.400605 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngmdc\" (UniqueName: \"kubernetes.io/projected/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-kube-api-access-ngmdc\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.400702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.400776 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.397609 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c069e7ae-aa85-4df1-bb38-66c5b45c3341-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.427034 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-5qnsr"]
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.427309 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.431839 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-scripts\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.441754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.452641 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.478836 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq9kz\" (UniqueName: \"kubernetes.io/projected/c069e7ae-aa85-4df1-bb38-66c5b45c3341-kube-api-access-xq9kz\") pod \"cinder-scheduler-0\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.478907 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.481725 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503417 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-logs\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503474 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503498 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngmdc\" (UniqueName: \"kubernetes.io/projected/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-kube-api-access-ngmdc\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503562 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-scripts\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503598 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503615 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data-custom\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503633 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503671 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503705 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503739 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-config\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503762 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503781 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs5t4\" (UniqueName: \"kubernetes.io/projected/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-kube-api-access-cs5t4\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.503805 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-svc\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.504478 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.504533 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-svc\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.505111 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.505219 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.505702 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-config\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.507677 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.513804 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.563678 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngmdc\" (UniqueName: \"kubernetes.io/projected/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-kube-api-access-ngmdc\") pod \"dnsmasq-dns-5784cf869f-5qnsr\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.580511 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.606255 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-scripts\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.606346 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data-custom\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.606369 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.606393 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.606442 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.606461 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs5t4\" (UniqueName: \"kubernetes.io/projected/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-kube-api-access-cs5t4\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.606504 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-logs\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.607069 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-logs\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.607130 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.617107 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.633304 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.634784 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-scripts\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.639016 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data-custom\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.641997 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs5t4\" (UniqueName: \"kubernetes.io/projected/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-kube-api-access-cs5t4\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.649191 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data\") pod \"cinder-api-0\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.934995 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 07 19:35:41 crc kubenswrapper[4813]: I1007 19:35:41.995653 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/barbican-api-f487749db-z8h9h"
Oct 07 19:35:42 crc kubenswrapper[4813]: I1007 19:35:42.971962 4813 generic.go:334] "Generic (PLEG): container finished" podID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerID="cf70a979e09f162083cfb6e88bb6f4202b83ebcf183a77ac4b76d3e31bfa5400" exitCode=0
Oct 07 19:35:42 crc kubenswrapper[4813]: I1007 19:35:42.971996 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3625b48-fada-4ec5-a62b-4ec51555f5b3","Type":"ContainerDied","Data":"cf70a979e09f162083cfb6e88bb6f4202b83ebcf183a77ac4b76d3e31bfa5400"}
Oct 07 19:35:42 crc kubenswrapper[4813]: I1007 19:35:42.972807 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" podUID="7ccb8a83-9c52-4caf-8371-596b60188018" containerName="dnsmasq-dns" containerID="cri-o://0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3" gracePeriod=10
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.343965 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.357625 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqgxp\" (UniqueName: \"kubernetes.io/projected/d3625b48-fada-4ec5-a62b-4ec51555f5b3-kube-api-access-jqgxp\") pod \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") "
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.357672 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-combined-ca-bundle\") pod \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") "
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.357702 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-scripts\") pod \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") "
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.357825 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-config-data\") pod \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") "
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.357905 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-sg-core-conf-yaml\") pod \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") "
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.357938 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-run-httpd\") pod \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") "
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.357962 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-log-httpd\") pod \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\" (UID: \"d3625b48-fada-4ec5-a62b-4ec51555f5b3\") "
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.358697 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d3625b48-fada-4ec5-a62b-4ec51555f5b3" (UID: "d3625b48-fada-4ec5-a62b-4ec51555f5b3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.362306 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d3625b48-fada-4ec5-a62b-4ec51555f5b3" (UID: "d3625b48-fada-4ec5-a62b-4ec51555f5b3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.381487 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-scripts" (OuterVolumeSpecName: "scripts") pod "d3625b48-fada-4ec5-a62b-4ec51555f5b3" (UID: "d3625b48-fada-4ec5-a62b-4ec51555f5b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.397634 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3625b48-fada-4ec5-a62b-4ec51555f5b3-kube-api-access-jqgxp" (OuterVolumeSpecName: "kube-api-access-jqgxp") pod "d3625b48-fada-4ec5-a62b-4ec51555f5b3" (UID: "d3625b48-fada-4ec5-a62b-4ec51555f5b3"). InnerVolumeSpecName "kube-api-access-jqgxp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.460748 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.461090 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3625b48-fada-4ec5-a62b-4ec51555f5b3-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.461101 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqgxp\" (UniqueName: \"kubernetes.io/projected/d3625b48-fada-4ec5-a62b-4ec51555f5b3-kube-api-access-jqgxp\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.461113 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-scripts\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.491367 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3625b48-fada-4ec5-a62b-4ec51555f5b3" (UID: "d3625b48-fada-4ec5-a62b-4ec51555f5b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.491730 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d3625b48-fada-4ec5-a62b-4ec51555f5b3" (UID: "d3625b48-fada-4ec5-a62b-4ec51555f5b3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.498468 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-config-data" (OuterVolumeSpecName: "config-data") pod "d3625b48-fada-4ec5-a62b-4ec51555f5b3" (UID: "d3625b48-fada-4ec5-a62b-4ec51555f5b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.563974 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.564005 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.564017 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3625b48-fada-4ec5-a62b-4ec51555f5b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.687079 4813 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.767473 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-nb\") pod \"7ccb8a83-9c52-4caf-8371-596b60188018\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.767522 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-sb\") pod \"7ccb8a83-9c52-4caf-8371-596b60188018\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.767544 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-svc\") pod \"7ccb8a83-9c52-4caf-8371-596b60188018\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.767577 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-swift-storage-0\") pod \"7ccb8a83-9c52-4caf-8371-596b60188018\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.767612 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czccn\" (UniqueName: \"kubernetes.io/projected/7ccb8a83-9c52-4caf-8371-596b60188018-kube-api-access-czccn\") pod \"7ccb8a83-9c52-4caf-8371-596b60188018\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.767692 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-config\") pod \"7ccb8a83-9c52-4caf-8371-596b60188018\" (UID: \"7ccb8a83-9c52-4caf-8371-596b60188018\") " Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.771859 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5496dd8845-nwmf5" Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.794425 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ccb8a83-9c52-4caf-8371-596b60188018-kube-api-access-czccn" (OuterVolumeSpecName: "kube-api-access-czccn") pod "7ccb8a83-9c52-4caf-8371-596b60188018" (UID: "7ccb8a83-9c52-4caf-8371-596b60188018"). InnerVolumeSpecName "kube-api-access-czccn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.847176 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8476d947c6-lsgxz"] Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.847607 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-8476d947c6-lsgxz" podUID="f24d064e-e0af-428b-a988-9850845b32e4" containerName="neutron-api" containerID="cri-o://47ef3467562b47038a42a674d54ca8b537ddf4767184e9344268458fd349c955" gracePeriod=30 Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.848833 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-8476d947c6-lsgxz" podUID="f24d064e-e0af-428b-a988-9850845b32e4" containerName="neutron-httpd" containerID="cri-o://8fd244f967228cde51b33731d7616c65680df3932fd4482af26a4a17dc661ac7" gracePeriod=30 Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.875044 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czccn\" (UniqueName: \"kubernetes.io/projected/7ccb8a83-9c52-4caf-8371-596b60188018-kube-api-access-czccn\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.879094 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 19:35:43 crc kubenswrapper[4813]: I1007 19:35:43.980810 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7ccb8a83-9c52-4caf-8371-596b60188018" (UID: "7ccb8a83-9c52-4caf-8371-596b60188018"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.035126 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-config" (OuterVolumeSpecName: "config") pod "7ccb8a83-9c52-4caf-8371-596b60188018" (UID: "7ccb8a83-9c52-4caf-8371-596b60188018"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.104123 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7ccb8a83-9c52-4caf-8371-596b60188018" (UID: "7ccb8a83-9c52-4caf-8371-596b60188018"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.115931 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.115971 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.115983 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.124693 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.138375 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c069e7ae-aa85-4df1-bb38-66c5b45c3341","Type":"ContainerStarted","Data":"d342f99e66629c4df52d6feeef1b1ab3fedf6526e43396f7e1eda5f3c8df2c09"} Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.146106 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d3625b48-fada-4ec5-a62b-4ec51555f5b3","Type":"ContainerDied","Data":"7f5fa66cea70981e619d987fd4c7b05482677211bc2d98f17443a1496c45d975"} Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.146210 4813 scope.go:117] "RemoveContainer" containerID="6708601e7225ba5d88f6588cafa2fe938487716a40df5de4963abc278f9a6bd8" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.146431 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.153371 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7ccb8a83-9c52-4caf-8371-596b60188018" (UID: "7ccb8a83-9c52-4caf-8371-596b60188018"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.155189 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7ccb8a83-9c52-4caf-8371-596b60188018" (UID: "7ccb8a83-9c52-4caf-8371-596b60188018"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.183393 4813 generic.go:334] "Generic (PLEG): container finished" podID="7ccb8a83-9c52-4caf-8371-596b60188018" containerID="0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3" exitCode=0 Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.183512 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" event={"ID":"7ccb8a83-9c52-4caf-8371-596b60188018","Type":"ContainerDied","Data":"0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3"} Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.183546 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" event={"ID":"7ccb8a83-9c52-4caf-8371-596b60188018","Type":"ContainerDied","Data":"6a0e20f42effe523e60947120e21fbfab3a3ec18990db4887991ae295f48ee55"} Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.183678 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-xrngt" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.202710 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.203698 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerStarted","Data":"650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947"} Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.205632 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:44 crc kubenswrapper[4813]: E1007 19:35:44.206384 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=barbican-api pod=barbican-api-f487749db-z8h9h_openstack(8705c88f-a04a-4861-9e64-05bf5e90237f)\"" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.222212 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.230197 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d696dd678-l487w" event={"ID":"584974f4-f44d-4f67-b675-9b0fb29be7f3","Type":"ContainerStarted","Data":"d91c367eaeea1e2bc49f6f86e5cd534817379624488801d66029af567e63a72b"} Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.244141 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.244165 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ccb8a83-9c52-4caf-8371-596b60188018-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.259849 4813 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/barbican-worker-664466bb6c-ldqlb" event={"ID":"dc06daa5-4a82-4b6c-bc77-2d40de999f15","Type":"ContainerStarted","Data":"1a824b1c82ad0b0465b5b0385c66dbf4b3b789ddc9da0a57e7d0fb185a3fe61f"} Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.427464 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-5qnsr"] Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.458394 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-xrngt"] Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.465598 4813 scope.go:117] "RemoveContainer" containerID="cf70a979e09f162083cfb6e88bb6f4202b83ebcf183a77ac4b76d3e31bfa5400" Oct 07 19:35:44 crc kubenswrapper[4813]: W1007 19:35:44.484541 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c6518c9_a69b_4270_8fd8_a7f55eacfce7.slice/crio-d48f1a13549cf87ef169897a69c6f6d7b84419bea587188a0c74289c9b15e173 WatchSource:0}: Error finding container d48f1a13549cf87ef169897a69c6f6d7b84419bea587188a0c74289c9b15e173: Status 404 returned error can't find the container with id d48f1a13549cf87ef169897a69c6f6d7b84419bea587188a0c74289c9b15e173 Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.495246 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-xrngt"] Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.536250 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.562927 4813 scope.go:117] "RemoveContainer" containerID="0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.563060 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.583559 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:35:44 crc kubenswrapper[4813]: E1007 19:35:44.584040 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ccb8a83-9c52-4caf-8371-596b60188018" containerName="dnsmasq-dns" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.584057 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ccb8a83-9c52-4caf-8371-596b60188018" containerName="dnsmasq-dns" Oct 07 19:35:44 crc kubenswrapper[4813]: E1007 19:35:44.584082 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ccb8a83-9c52-4caf-8371-596b60188018" containerName="init" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.584088 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ccb8a83-9c52-4caf-8371-596b60188018" containerName="init" Oct 07 19:35:44 crc kubenswrapper[4813]: E1007 19:35:44.584105 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerName="sg-core" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.584112 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerName="sg-core" Oct 07 19:35:44 crc kubenswrapper[4813]: E1007 19:35:44.584126 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerName="ceilometer-notification-agent" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.584132 4813 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerName="ceilometer-notification-agent" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.584299 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerName="sg-core" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.584315 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" containerName="ceilometer-notification-agent" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.584344 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ccb8a83-9c52-4caf-8371-596b60188018" containerName="dnsmasq-dns" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.585999 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.596353 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.596544 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.603404 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.618938 4813 scope.go:117] "RemoveContainer" containerID="cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.642447 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ccb8a83-9c52-4caf-8371-596b60188018" path="/var/lib/kubelet/pods/7ccb8a83-9c52-4caf-8371-596b60188018/volumes" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.644753 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3625b48-fada-4ec5-a62b-4ec51555f5b3" path="/var/lib/kubelet/pods/d3625b48-fada-4ec5-a62b-4ec51555f5b3/volumes" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.660537 4813 scope.go:117] "RemoveContainer" containerID="0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3" Oct 07 19:35:44 crc kubenswrapper[4813]: E1007 19:35:44.668656 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3\": container with ID starting with 0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3 not found: ID does not exist" containerID="0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.668701 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3"} err="failed to get container status \"0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3\": rpc error: code = NotFound desc = could not find container \"0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3\": container with ID starting with 0beb6dae8851b9199ff44ab7b09527d239b3d83efef4b4a1cf157646a9d1f8f3 not found: ID does not exist" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.668729 4813 scope.go:117] "RemoveContainer" containerID="cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6" Oct 07 19:35:44 crc kubenswrapper[4813]: E1007 19:35:44.678441 4813 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6\": container with ID starting with cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6 not found: ID does not exist" containerID="cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.678473 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6"} err="failed to get container status \"cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6\": rpc error: code = NotFound desc = could not find container \"cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6\": container with ID starting with cf8e6dc5a66242a778ffa1bfb2d5228bfed61873da0cf5da2bd5673cae5358a6 not found: ID does not exist" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.768559 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs44v\" (UniqueName: \"kubernetes.io/projected/3477fb6d-49bd-4728-832d-4ffd556c2866-kube-api-access-gs44v\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.768642 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-scripts\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.768685 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-log-httpd\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.768708 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-run-httpd\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.768754 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.768825 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.768873 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-config-data\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " 
pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.871563 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-scripts\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.871642 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-run-httpd\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.871686 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-log-httpd\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.871741 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.871783 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.871831 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-config-data\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.871920 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs44v\" (UniqueName: \"kubernetes.io/projected/3477fb6d-49bd-4728-832d-4ffd556c2866-kube-api-access-gs44v\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.876074 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-run-httpd\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.882666 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-log-httpd\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.890221 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-config-data\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc 
kubenswrapper[4813]: I1007 19:35:44.891507 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.901063 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs44v\" (UniqueName: \"kubernetes.io/projected/3477fb6d-49bd-4728-832d-4ffd556c2866-kube-api-access-gs44v\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.911858 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.911942 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-scripts\") pod \"ceilometer-0\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " pod="openstack/ceilometer-0" Oct 07 19:35:44 crc kubenswrapper[4813]: I1007 19:35:44.934954 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.000774 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.002079 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.300278 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d696dd678-l487w" event={"ID":"584974f4-f44d-4f67-b675-9b0fb29be7f3","Type":"ContainerStarted","Data":"6a33114f6f92f3b58c4ab3278ea9e220acda02316fbdae0b734b58d9b56f061a"} Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.314285 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-664466bb6c-ldqlb" event={"ID":"dc06daa5-4a82-4b6c-bc77-2d40de999f15","Type":"ContainerStarted","Data":"9e9b8271c499e713fe571f77b417a6d6c18db87b9c6e7d36fc08b254ffcffa00"} Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.342216 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-d696dd678-l487w" podStartSLOduration=7.559755838 podStartE2EDuration="11.342200618s" podCreationTimestamp="2025-10-07 19:35:34 +0000 UTC" firstStartedPulling="2025-10-07 19:35:39.40010338 +0000 UTC m=+1065.478358991" lastFinishedPulling="2025-10-07 19:35:43.18254816 +0000 UTC m=+1069.260803771" observedRunningTime="2025-10-07 19:35:45.326649271 +0000 UTC m=+1071.404904882" watchObservedRunningTime="2025-10-07 19:35:45.342200618 +0000 UTC m=+1071.420456229" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.358467 4813 generic.go:334] "Generic (PLEG): container finished" 
podID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerID="abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59" exitCode=0 Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.358615 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" event={"ID":"2c6518c9-a69b-4270-8fd8-a7f55eacfce7","Type":"ContainerDied","Data":"abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59"} Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.358687 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" event={"ID":"2c6518c9-a69b-4270-8fd8-a7f55eacfce7","Type":"ContainerStarted","Data":"d48f1a13549cf87ef169897a69c6f6d7b84419bea587188a0c74289c9b15e173"} Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.359899 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-664466bb6c-ldqlb" podStartSLOduration=7.341494193 podStartE2EDuration="11.359878213s" podCreationTimestamp="2025-10-07 19:35:34 +0000 UTC" firstStartedPulling="2025-10-07 19:35:39.399569026 +0000 UTC m=+1065.477824627" lastFinishedPulling="2025-10-07 19:35:43.417953046 +0000 UTC m=+1069.496208647" observedRunningTime="2025-10-07 19:35:45.357486307 +0000 UTC m=+1071.435741918" watchObservedRunningTime="2025-10-07 19:35:45.359878213 +0000 UTC m=+1071.438133824" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.360262 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49","Type":"ContainerStarted","Data":"a043c754dba5101de4f0a28d5b8bef28284733ce4ae0d79d89657b2837b1752e"} Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.415663 4813 generic.go:334] "Generic (PLEG): container finished" podID="f24d064e-e0af-428b-a988-9850845b32e4" containerID="8fd244f967228cde51b33731d7616c65680df3932fd4482af26a4a17dc661ac7" exitCode=0 Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.415891 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8476d947c6-lsgxz" event={"ID":"f24d064e-e0af-428b-a988-9850845b32e4","Type":"ContainerDied","Data":"8fd244f967228cde51b33731d7616c65680df3932fd4482af26a4a17dc661ac7"} Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.443347 4813 generic.go:334] "Generic (PLEG): container finished" podID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" exitCode=1 Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.443400 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerDied","Data":"650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947"} Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.443433 4813 scope.go:117] "RemoveContainer" containerID="8f638db116aaece97374c22011c37f62dad55024e7eee19077f7487a16681295" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.444090 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.444174 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection 
refused" Oct 07 19:35:45 crc kubenswrapper[4813]: E1007 19:35:45.444279 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=barbican-api pod=barbican-api-f487749db-z8h9h_openstack(8705c88f-a04a-4861-9e64-05bf5e90237f)\"" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.546745 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.994797 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:45 crc kubenswrapper[4813]: I1007 19:35:45.995494 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:46 crc kubenswrapper[4813]: I1007 19:35:46.467480 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49","Type":"ContainerStarted","Data":"b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a"} Oct 07 19:35:46 crc kubenswrapper[4813]: I1007 19:35:46.478282 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:46 crc kubenswrapper[4813]: I1007 19:35:46.478315 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:46 crc kubenswrapper[4813]: E1007 19:35:46.478500 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=barbican-api pod=barbican-api-f487749db-z8h9h_openstack(8705c88f-a04a-4861-9e64-05bf5e90237f)\"" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" Oct 07 19:35:46 crc kubenswrapper[4813]: I1007 19:35:46.482896 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerStarted","Data":"e6d87742d0d48017d14afb056713a62f021f110f31796247d25fa5ba57d51893"} Oct 07 19:35:46 crc kubenswrapper[4813]: I1007 19:35:46.496132 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" event={"ID":"2c6518c9-a69b-4270-8fd8-a7f55eacfce7","Type":"ContainerStarted","Data":"c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca"} Oct 07 19:35:46 crc kubenswrapper[4813]: I1007 19:35:46.496600 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.505114 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49","Type":"ContainerStarted","Data":"ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77"} Oct 07 19:35:47 crc kubenswrapper[4813]: 
I1007 19:35:47.505610 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerName="cinder-api-log" containerID="cri-o://b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a" gracePeriod=30 Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.505871 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.506089 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerName="cinder-api" containerID="cri-o://ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77" gracePeriod=30 Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.516343 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c069e7ae-aa85-4df1-bb38-66c5b45c3341","Type":"ContainerStarted","Data":"5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635"} Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.516383 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c069e7ae-aa85-4df1-bb38-66c5b45c3341","Type":"ContainerStarted","Data":"a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e"} Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.531233 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerStarted","Data":"4c8ca7e84ef4af776b2ed6b725215519ec01466989e4fd6920807763e2e25677"} Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.531989 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:47 crc kubenswrapper[4813]: E1007 19:35:47.532185 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=barbican-api pod=barbican-api-f487749db-z8h9h_openstack(8705c88f-a04a-4861-9e64-05bf5e90237f)\"" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.532559 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.547062 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.547027004 podStartE2EDuration="6.547027004s" podCreationTimestamp="2025-10-07 19:35:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:47.533246627 +0000 UTC m=+1073.611502238" watchObservedRunningTime="2025-10-07 19:35:47.547027004 +0000 UTC m=+1073.625282615" Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.548076 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" podStartSLOduration=6.548070443 podStartE2EDuration="6.548070443s" podCreationTimestamp="2025-10-07 19:35:41 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:46.521523766 +0000 UTC m=+1072.599779377" watchObservedRunningTime="2025-10-07 19:35:47.548070443 +0000 UTC m=+1073.626326054" Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.578424 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.633745664 podStartE2EDuration="6.578388395s" podCreationTimestamp="2025-10-07 19:35:41 +0000 UTC" firstStartedPulling="2025-10-07 19:35:43.916528142 +0000 UTC m=+1069.994783753" lastFinishedPulling="2025-10-07 19:35:45.861170883 +0000 UTC m=+1071.939426484" observedRunningTime="2025-10-07 19:35:47.559810335 +0000 UTC m=+1073.638065946" watchObservedRunningTime="2025-10-07 19:35:47.578388395 +0000 UTC m=+1073.656644006" Oct 07 19:35:47 crc kubenswrapper[4813]: I1007 19:35:47.996107 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:48 crc kubenswrapper[4813]: I1007 19:35:48.544044 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerStarted","Data":"5f107cf54261a1000aaa1a0f00be8f665ba33e407dc73f82e5077d08b233eb04"} Oct 07 19:35:48 crc kubenswrapper[4813]: I1007 19:35:48.549715 4813 generic.go:334] "Generic (PLEG): container finished" podID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerID="b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a" exitCode=143 Oct 07 19:35:48 crc kubenswrapper[4813]: I1007 19:35:48.549795 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49","Type":"ContainerDied","Data":"b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a"} Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.468146 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.591623 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-logs\") pod \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.591726 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-scripts\") pod \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.591776 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-combined-ca-bundle\") pod \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.591845 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data-custom\") pod \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.591888 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data\") pod \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.591909 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs5t4\" (UniqueName: \"kubernetes.io/projected/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-kube-api-access-cs5t4\") pod \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.591947 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-etc-machine-id\") pod \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\" (UID: \"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49\") " Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.592365 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" (UID: "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.592987 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerStarted","Data":"75dc9b33dbd27fd0d60b8d2fbe18fec358911426a4043de45f40e06cce24206c"} Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.593873 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-logs" (OuterVolumeSpecName: "logs") pod "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" (UID: "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.601521 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-kube-api-access-cs5t4" (OuterVolumeSpecName: "kube-api-access-cs5t4") pod "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" (UID: "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49"). InnerVolumeSpecName "kube-api-access-cs5t4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.601970 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-scripts" (OuterVolumeSpecName: "scripts") pod "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" (UID: "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.610465 4813 generic.go:334] "Generic (PLEG): container finished" podID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerID="ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77" exitCode=0 Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.610505 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49","Type":"ContainerDied","Data":"ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77"} Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.610531 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49","Type":"ContainerDied","Data":"a043c754dba5101de4f0a28d5b8bef28284733ce4ae0d79d89657b2837b1752e"} Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.610552 4813 scope.go:117] "RemoveContainer" containerID="ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.610670 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.618907 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" (UID: "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.646458 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" (UID: "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.696079 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs5t4\" (UniqueName: \"kubernetes.io/projected/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-kube-api-access-cs5t4\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.696106 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.696116 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.696126 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.696133 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.696141 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.751626 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data" (OuterVolumeSpecName: "config-data") pod "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" (UID: "1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.754861 4813 scope.go:117] "RemoveContainer" containerID="b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.798207 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.815061 4813 scope.go:117] "RemoveContainer" containerID="ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77" Oct 07 19:35:49 crc kubenswrapper[4813]: E1007 19:35:49.816697 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77\": container with ID starting with ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77 not found: ID does not exist" containerID="ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.816743 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77"} err="failed to get container status \"ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77\": rpc error: code = NotFound desc = could not find container \"ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77\": container with ID starting with ec7d84a52ab9d9489af6524463a8d3ed39eabac5de14643b486d27c51485ff77 not found: ID does not exist" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.816769 4813 scope.go:117] "RemoveContainer" containerID="b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a" Oct 07 19:35:49 crc kubenswrapper[4813]: E1007 19:35:49.817045 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a\": container with ID starting with b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a not found: ID does not exist" containerID="b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.817069 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a"} err="failed to get container status \"b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a\": rpc error: code = NotFound desc = could not find container \"b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a\": container with ID starting with b2ad144da16e0df2ccf1add1d455114350e843bf897041635f577c678c83944a not found: ID does not exist" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.938131 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.944898 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.961701 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 07 19:35:49 crc kubenswrapper[4813]: E1007 19:35:49.962055 4813 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerName="cinder-api-log" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.962071 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerName="cinder-api-log" Oct 07 19:35:49 crc kubenswrapper[4813]: E1007 19:35:49.962081 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerName="cinder-api" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.962088 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerName="cinder-api" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.962281 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerName="cinder-api-log" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.962306 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" containerName="cinder-api" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.963202 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.964897 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.965024 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 07 19:35:49 crc kubenswrapper[4813]: I1007 19:35:49.965719 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.028183 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102303 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102633 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec6f0c69-4799-4be4-b465-19ff21b1f35a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102675 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec6f0c69-4799-4be4-b465-19ff21b1f35a-logs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102695 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-config-data\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102745 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102792 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-config-data-custom\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102811 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-scripts\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102836 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w8f6\" (UniqueName: \"kubernetes.io/projected/ec6f0c69-4799-4be4-b465-19ff21b1f35a-kube-api-access-6w8f6\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.102860 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204038 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-config-data-custom\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204080 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-scripts\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204109 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w8f6\" (UniqueName: \"kubernetes.io/projected/ec6f0c69-4799-4be4-b465-19ff21b1f35a-kube-api-access-6w8f6\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204134 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204156 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204183 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec6f0c69-4799-4be4-b465-19ff21b1f35a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204214 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec6f0c69-4799-4be4-b465-19ff21b1f35a-logs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204233 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-config-data\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.204295 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.205023 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec6f0c69-4799-4be4-b465-19ff21b1f35a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.205060 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec6f0c69-4799-4be4-b465-19ff21b1f35a-logs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.209754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-config-data-custom\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.209869 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.211132 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-config-data\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.211508 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-scripts\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.215925 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.216515 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6f0c69-4799-4be4-b465-19ff21b1f35a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.223875 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w8f6\" (UniqueName: \"kubernetes.io/projected/ec6f0c69-4799-4be4-b465-19ff21b1f35a-kube-api-access-6w8f6\") pod \"cinder-api-0\" (UID: \"ec6f0c69-4799-4be4-b465-19ff21b1f35a\") " pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.277744 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.561472 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.637633 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49" path="/var/lib/kubelet/pods/1b7a1bd6-b2ff-4c0c-a51d-3d1dc90b8a49/volumes" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.687280 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerStarted","Data":"1f5d0532125d8516d14b2e500969f9472bd1973cc6fdc91b0a0e5be8a83b55cb"} Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.688772 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.719478 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.182035923 podStartE2EDuration="6.719457792s" podCreationTimestamp="2025-10-07 19:35:44 +0000 UTC" firstStartedPulling="2025-10-07 19:35:45.58946644 +0000 UTC m=+1071.667722051" lastFinishedPulling="2025-10-07 19:35:50.126888309 +0000 UTC m=+1076.205143920" observedRunningTime="2025-10-07 19:35:50.707067072 +0000 UTC m=+1076.785322683" watchObservedRunningTime="2025-10-07 19:35:50.719457792 +0000 UTC m=+1076.797713403" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.735818 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-bd4864b74-5mp8m" Oct 07 19:35:50 crc kubenswrapper[4813]: I1007 19:35:50.933448 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:50.997787 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:50.998419 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" 
output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:50.998458 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:50.999147 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="barbican-api-log" containerStatusID={"Type":"cri-o","ID":"98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a"} pod="openstack/barbican-api-f487749db-z8h9h" containerMessage="Container barbican-api-log failed liveness probe, will be restarted" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:50.999165 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:50.999184 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" containerID="cri-o://98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a" gracePeriod=30 Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:50.999509 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Oct 07 19:35:51 crc kubenswrapper[4813]: E1007 19:35:51.473062 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=barbican-api pod=barbican-api-f487749db-z8h9h_openstack(8705c88f-a04a-4861-9e64-05bf5e90237f)\"" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.582295 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.620495 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.699835 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec6f0c69-4799-4be4-b465-19ff21b1f35a","Type":"ContainerStarted","Data":"bf1ef7241355bee18e66af9fc6ddeb196bd3451af4f7b8074219a8ccf32361d1"} Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.707304 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-rkmzm"] Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.713675 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" podUID="84915478-8abb-40dd-bb9a-4c623f742063" containerName="dnsmasq-dns" containerID="cri-o://d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec" gracePeriod=10 Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.714504 4813 generic.go:334] "Generic (PLEG): container finished" podID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerID="98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a" exitCode=143 Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.714647 4813 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerDied","Data":"98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a"} Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.714743 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerStarted","Data":"243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8"} Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.715480 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:51 crc kubenswrapper[4813]: I1007 19:35:51.715645 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:51 crc kubenswrapper[4813]: E1007 19:35:51.715942 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=barbican-api pod=barbican-api-f487749db-z8h9h_openstack(8705c88f-a04a-4861-9e64-05bf5e90237f)\"" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.259901 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 07 19:35:52 crc kubenswrapper[4813]: E1007 19:35:52.293600 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84915478_8abb_40dd_bb9a_4c623f742063.slice/crio-conmon-d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec.scope\": RecentStats: unable to find data in memory cache]" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.331008 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.516655 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.674451 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-sb\") pod \"84915478-8abb-40dd-bb9a-4c623f742063\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.674534 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-swift-storage-0\") pod \"84915478-8abb-40dd-bb9a-4c623f742063\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.674702 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-svc\") pod \"84915478-8abb-40dd-bb9a-4c623f742063\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.674741 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxj22\" (UniqueName: \"kubernetes.io/projected/84915478-8abb-40dd-bb9a-4c623f742063-kube-api-access-sxj22\") pod \"84915478-8abb-40dd-bb9a-4c623f742063\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.674762 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-nb\") pod \"84915478-8abb-40dd-bb9a-4c623f742063\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.674812 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-config\") pod \"84915478-8abb-40dd-bb9a-4c623f742063\" (UID: \"84915478-8abb-40dd-bb9a-4c623f742063\") " Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.682788 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84915478-8abb-40dd-bb9a-4c623f742063-kube-api-access-sxj22" (OuterVolumeSpecName: "kube-api-access-sxj22") pod "84915478-8abb-40dd-bb9a-4c623f742063" (UID: "84915478-8abb-40dd-bb9a-4c623f742063"). InnerVolumeSpecName "kube-api-access-sxj22". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.730269 4813 generic.go:334] "Generic (PLEG): container finished" podID="f24d064e-e0af-428b-a988-9850845b32e4" containerID="47ef3467562b47038a42a674d54ca8b537ddf4767184e9344268458fd349c955" exitCode=0 Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.730348 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8476d947c6-lsgxz" event={"ID":"f24d064e-e0af-428b-a988-9850845b32e4","Type":"ContainerDied","Data":"47ef3467562b47038a42a674d54ca8b537ddf4767184e9344268458fd349c955"} Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.771450 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "84915478-8abb-40dd-bb9a-4c623f742063" (UID: "84915478-8abb-40dd-bb9a-4c623f742063"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.781935 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxj22\" (UniqueName: \"kubernetes.io/projected/84915478-8abb-40dd-bb9a-4c623f742063-kube-api-access-sxj22\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.781964 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.782224 4813 generic.go:334] "Generic (PLEG): container finished" podID="84915478-8abb-40dd-bb9a-4c623f742063" containerID="d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec" exitCode=0 Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.782313 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" event={"ID":"84915478-8abb-40dd-bb9a-4c623f742063","Type":"ContainerDied","Data":"d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec"} Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.782362 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" event={"ID":"84915478-8abb-40dd-bb9a-4c623f742063","Type":"ContainerDied","Data":"d329df4b8efadeedc1d7606256f14a047fc02e384a61b47de72d22f620b7e25f"} Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.782384 4813 scope.go:117] "RemoveContainer" containerID="d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.782519 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-rkmzm" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.784819 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-config" (OuterVolumeSpecName: "config") pod "84915478-8abb-40dd-bb9a-4c623f742063" (UID: "84915478-8abb-40dd-bb9a-4c623f742063"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.786234 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "84915478-8abb-40dd-bb9a-4c623f742063" (UID: "84915478-8abb-40dd-bb9a-4c623f742063"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.804094 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec6f0c69-4799-4be4-b465-19ff21b1f35a","Type":"ContainerStarted","Data":"412034e0df72139f56709b42b3711ad0bb1b87021f6dbf6f5b21b177e12b2673"} Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.804836 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:52 crc kubenswrapper[4813]: E1007 19:35:52.805029 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=barbican-api pod=barbican-api-f487749db-z8h9h_openstack(8705c88f-a04a-4861-9e64-05bf5e90237f)\"" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.837857 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "84915478-8abb-40dd-bb9a-4c623f742063" (UID: "84915478-8abb-40dd-bb9a-4c623f742063"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.846920 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "84915478-8abb-40dd-bb9a-4c623f742063" (UID: "84915478-8abb-40dd-bb9a-4c623f742063"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.883068 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.883096 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.883106 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.883116 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/84915478-8abb-40dd-bb9a-4c623f742063-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:52 crc kubenswrapper[4813]: I1007 19:35:52.922486 4813 scope.go:117] "RemoveContainer" containerID="c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.058267 4813 scope.go:117] "RemoveContainer" containerID="d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec" Oct 07 19:35:53 crc kubenswrapper[4813]: E1007 19:35:53.064460 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec\": container with ID starting with d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec not found: ID does not exist" containerID="d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.064500 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec"} err="failed to get container status \"d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec\": rpc error: code = NotFound desc = could not find container \"d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec\": container with ID starting with d54363343a9907b5d2dc1c43b44849594739ecf4e2067b75918b9881db043aec not found: ID does not exist" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.064525 4813 scope.go:117] "RemoveContainer" containerID="c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f" Oct 07 19:35:53 crc kubenswrapper[4813]: E1007 19:35:53.064956 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f\": container with ID starting with c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f not found: ID does not exist" containerID="c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.064987 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f"} err="failed to get container status \"c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f\": rpc error: code = NotFound desc = could not find 
container \"c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f\": container with ID starting with c3e9856b24228c3aaee6e81cc8ae92600da3bfe5b6001ccc91141e67be4f031f not found: ID does not exist" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.174380 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.186538 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-rkmzm"] Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.200908 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-rkmzm"] Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.296394 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7f688869c6-w96p7" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.299529 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-config\") pod \"f24d064e-e0af-428b-a988-9850845b32e4\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.299792 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-ovndb-tls-certs\") pod \"f24d064e-e0af-428b-a988-9850845b32e4\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.299901 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-combined-ca-bundle\") pod \"f24d064e-e0af-428b-a988-9850845b32e4\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.300007 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-httpd-config\") pod \"f24d064e-e0af-428b-a988-9850845b32e4\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.300123 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w76tl\" (UniqueName: \"kubernetes.io/projected/f24d064e-e0af-428b-a988-9850845b32e4-kube-api-access-w76tl\") pod \"f24d064e-e0af-428b-a988-9850845b32e4\" (UID: \"f24d064e-e0af-428b-a988-9850845b32e4\") " Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.314000 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "f24d064e-e0af-428b-a988-9850845b32e4" (UID: "f24d064e-e0af-428b-a988-9850845b32e4"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.316045 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f24d064e-e0af-428b-a988-9850845b32e4-kube-api-access-w76tl" (OuterVolumeSpecName: "kube-api-access-w76tl") pod "f24d064e-e0af-428b-a988-9850845b32e4" (UID: "f24d064e-e0af-428b-a988-9850845b32e4"). InnerVolumeSpecName "kube-api-access-w76tl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.382536 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f24d064e-e0af-428b-a988-9850845b32e4" (UID: "f24d064e-e0af-428b-a988-9850845b32e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.396470 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-f487749db-z8h9h"] Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.408790 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.408827 4813 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.408837 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w76tl\" (UniqueName: \"kubernetes.io/projected/f24d064e-e0af-428b-a988-9850845b32e4-kube-api-access-w76tl\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.428201 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-config" (OuterVolumeSpecName: "config") pod "f24d064e-e0af-428b-a988-9850845b32e4" (UID: "f24d064e-e0af-428b-a988-9850845b32e4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.440213 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "f24d064e-e0af-428b-a988-9850845b32e4" (UID: "f24d064e-e0af-428b-a988-9850845b32e4"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.510847 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.511109 4813 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f24d064e-e0af-428b-a988-9850845b32e4-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.815663 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ec6f0c69-4799-4be4-b465-19ff21b1f35a","Type":"ContainerStarted","Data":"20faeb493516e1be0a5331a19c83c10c35e267bf18ced8b6debd233130885149"} Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.815822 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.818508 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8476d947c6-lsgxz" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.818502 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8476d947c6-lsgxz" event={"ID":"f24d064e-e0af-428b-a988-9850845b32e4","Type":"ContainerDied","Data":"01691d1b5ea52b3cd4fdb7e87fb2e8ab313259741ee06d0d512fb940e2fff787"} Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.818550 4813 scope.go:117] "RemoveContainer" containerID="8fd244f967228cde51b33731d7616c65680df3932fd4482af26a4a17dc661ac7" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.818620 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-f487749db-z8h9h" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" containerID="cri-o://243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8" gracePeriod=30 Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.843710 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.843691789 podStartE2EDuration="4.843691789s" podCreationTimestamp="2025-10-07 19:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:35:53.833665574 +0000 UTC m=+1079.911921185" watchObservedRunningTime="2025-10-07 19:35:53.843691789 +0000 UTC m=+1079.921947400" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.867753 4813 scope.go:117] "RemoveContainer" containerID="47ef3467562b47038a42a674d54ca8b537ddf4767184e9344268458fd349c955" Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.921917 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8476d947c6-lsgxz"] Oct 07 19:35:53 crc kubenswrapper[4813]: I1007 19:35:53.963942 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8476d947c6-lsgxz"] Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.342765 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.441648 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data-custom\") pod \"8705c88f-a04a-4861-9e64-05bf5e90237f\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.441705 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-combined-ca-bundle\") pod \"8705c88f-a04a-4861-9e64-05bf5e90237f\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.441783 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8705c88f-a04a-4861-9e64-05bf5e90237f-logs\") pod \"8705c88f-a04a-4861-9e64-05bf5e90237f\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.441824 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ttpr\" (UniqueName: \"kubernetes.io/projected/8705c88f-a04a-4861-9e64-05bf5e90237f-kube-api-access-2ttpr\") pod \"8705c88f-a04a-4861-9e64-05bf5e90237f\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.441863 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data\") pod \"8705c88f-a04a-4861-9e64-05bf5e90237f\" (UID: \"8705c88f-a04a-4861-9e64-05bf5e90237f\") " Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.442447 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8705c88f-a04a-4861-9e64-05bf5e90237f-logs" (OuterVolumeSpecName: "logs") pod "8705c88f-a04a-4861-9e64-05bf5e90237f" (UID: "8705c88f-a04a-4861-9e64-05bf5e90237f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.445472 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8705c88f-a04a-4861-9e64-05bf5e90237f" (UID: "8705c88f-a04a-4861-9e64-05bf5e90237f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.447857 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8705c88f-a04a-4861-9e64-05bf5e90237f-kube-api-access-2ttpr" (OuterVolumeSpecName: "kube-api-access-2ttpr") pod "8705c88f-a04a-4861-9e64-05bf5e90237f" (UID: "8705c88f-a04a-4861-9e64-05bf5e90237f"). InnerVolumeSpecName "kube-api-access-2ttpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.478699 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8705c88f-a04a-4861-9e64-05bf5e90237f" (UID: "8705c88f-a04a-4861-9e64-05bf5e90237f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.521146 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data" (OuterVolumeSpecName: "config-data") pod "8705c88f-a04a-4861-9e64-05bf5e90237f" (UID: "8705c88f-a04a-4861-9e64-05bf5e90237f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.545648 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.545906 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.545977 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8705c88f-a04a-4861-9e64-05bf5e90237f-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.546045 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ttpr\" (UniqueName: \"kubernetes.io/projected/8705c88f-a04a-4861-9e64-05bf5e90237f-kube-api-access-2ttpr\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.546104 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8705c88f-a04a-4861-9e64-05bf5e90237f-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.615407 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84915478-8abb-40dd-bb9a-4c623f742063" path="/var/lib/kubelet/pods/84915478-8abb-40dd-bb9a-4c623f742063/volumes" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.617927 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f24d064e-e0af-428b-a988-9850845b32e4" path="/var/lib/kubelet/pods/f24d064e-e0af-428b-a988-9850845b32e4/volumes" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.830085 4813 generic.go:334] "Generic (PLEG): container finished" podID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerID="243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8" exitCode=143 Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.830180 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerDied","Data":"243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8"} Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.830212 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f487749db-z8h9h" event={"ID":"8705c88f-a04a-4861-9e64-05bf5e90237f","Type":"ContainerDied","Data":"6b310ecc7fcccc8f97d11990a2e9c0b56f24952a220abb5123648589ce9bf2b7"} Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.830249 4813 scope.go:117] "RemoveContainer" containerID="243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.830933 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-f487749db-z8h9h" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.855170 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.863477 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-f487749db-z8h9h"] Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.868022 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-f487749db-z8h9h"] Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.877437 4813 scope.go:117] "RemoveContainer" containerID="98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.899295 4813 scope.go:117] "RemoveContainer" containerID="243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8" Oct 07 19:35:54 crc kubenswrapper[4813]: E1007 19:35:54.902431 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8\": container with ID starting with 243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8 not found: ID does not exist" containerID="243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.902536 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8"} err="failed to get container status \"243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8\": rpc error: code = NotFound desc = could not find container \"243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8\": container with ID starting with 243e0f4b18873e7f5566aec3b7dafc77175f88eef4ea0183aa6d461c2a4fb3c8 not found: ID does not exist" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.902652 4813 scope.go:117] "RemoveContainer" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:54 crc kubenswrapper[4813]: E1007 19:35:54.903081 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947\": container with ID starting with 650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947 not found: ID does not exist" containerID="650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.903165 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947"} err="failed to get container status \"650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947\": rpc error: code = NotFound desc = could not find container \"650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947\": container with ID starting with 650cec258290036d1b7d0b2f52f38e6393a664c5af2391dca18276aa9503a947 not found: ID does not exist" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.903236 4813 scope.go:117] "RemoveContainer" containerID="98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a" Oct 07 19:35:54 crc kubenswrapper[4813]: E1007 19:35:54.903524 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code 
= NotFound desc = could not find container \"98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a\": container with ID starting with 98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a not found: ID does not exist" containerID="98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a" Oct 07 19:35:54 crc kubenswrapper[4813]: I1007 19:35:54.903609 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a"} err="failed to get container status \"98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a\": rpc error: code = NotFound desc = could not find container \"98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a\": container with ID starting with 98435cf2c6eeb69435e96220f6370090a64001853a7e9bedfbfd3f255d5a5b0a not found: ID does not exist" Oct 07 19:35:55 crc kubenswrapper[4813]: I1007 19:35:55.694649 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-68ff4bb5b-nhpkd" Oct 07 19:35:56 crc kubenswrapper[4813]: I1007 19:35:56.592109 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 07 19:35:56 crc kubenswrapper[4813]: I1007 19:35:56.611896 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" path="/var/lib/kubelet/pods/8705c88f-a04a-4861-9e64-05bf5e90237f/volumes" Oct 07 19:35:56 crc kubenswrapper[4813]: I1007 19:35:56.637475 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 19:35:56 crc kubenswrapper[4813]: I1007 19:35:56.876002 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="probe" containerID="cri-o://5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635" gracePeriod=30 Oct 07 19:35:56 crc kubenswrapper[4813]: I1007 19:35:56.876173 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="cinder-scheduler" containerID="cri-o://a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e" gracePeriod=30 Oct 07 19:35:57 crc kubenswrapper[4813]: I1007 19:35:57.889015 4813 generic.go:334] "Generic (PLEG): container finished" podID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerID="5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635" exitCode=0 Oct 07 19:35:57 crc kubenswrapper[4813]: I1007 19:35:57.889092 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c069e7ae-aa85-4df1-bb38-66c5b45c3341","Type":"ContainerDied","Data":"5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635"} Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.551278 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.726280 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data-custom\") pod \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.726446 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-scripts\") pod \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.727226 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq9kz\" (UniqueName: \"kubernetes.io/projected/c069e7ae-aa85-4df1-bb38-66c5b45c3341-kube-api-access-xq9kz\") pod \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.727276 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-combined-ca-bundle\") pod \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.727300 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data\") pod \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.727348 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c069e7ae-aa85-4df1-bb38-66c5b45c3341-etc-machine-id\") pod \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\" (UID: \"c069e7ae-aa85-4df1-bb38-66c5b45c3341\") " Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.727627 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c069e7ae-aa85-4df1-bb38-66c5b45c3341-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c069e7ae-aa85-4df1-bb38-66c5b45c3341" (UID: "c069e7ae-aa85-4df1-bb38-66c5b45c3341"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.736634 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-scripts" (OuterVolumeSpecName: "scripts") pod "c069e7ae-aa85-4df1-bb38-66c5b45c3341" (UID: "c069e7ae-aa85-4df1-bb38-66c5b45c3341"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.737079 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c069e7ae-aa85-4df1-bb38-66c5b45c3341-kube-api-access-xq9kz" (OuterVolumeSpecName: "kube-api-access-xq9kz") pod "c069e7ae-aa85-4df1-bb38-66c5b45c3341" (UID: "c069e7ae-aa85-4df1-bb38-66c5b45c3341"). InnerVolumeSpecName "kube-api-access-xq9kz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.752980 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c069e7ae-aa85-4df1-bb38-66c5b45c3341" (UID: "c069e7ae-aa85-4df1-bb38-66c5b45c3341"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.813434 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c069e7ae-aa85-4df1-bb38-66c5b45c3341" (UID: "c069e7ae-aa85-4df1-bb38-66c5b45c3341"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.829922 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq9kz\" (UniqueName: \"kubernetes.io/projected/c069e7ae-aa85-4df1-bb38-66c5b45c3341-kube-api-access-xq9kz\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.830160 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.830226 4813 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c069e7ae-aa85-4df1-bb38-66c5b45c3341-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.830285 4813 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.830700 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.849265 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data" (OuterVolumeSpecName: "config-data") pod "c069e7ae-aa85-4df1-bb38-66c5b45c3341" (UID: "c069e7ae-aa85-4df1-bb38-66c5b45c3341"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.902270 4813 generic.go:334] "Generic (PLEG): container finished" podID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerID="a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e" exitCode=0 Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.902317 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c069e7ae-aa85-4df1-bb38-66c5b45c3341","Type":"ContainerDied","Data":"a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e"} Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.902348 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.902368 4813 scope.go:117] "RemoveContainer" containerID="5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.902356 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c069e7ae-aa85-4df1-bb38-66c5b45c3341","Type":"ContainerDied","Data":"d342f99e66629c4df52d6feeef1b1ab3fedf6526e43396f7e1eda5f3c8df2c09"} Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.932384 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069e7ae-aa85-4df1-bb38-66c5b45c3341-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.952983 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.957710 4813 scope.go:117] "RemoveContainer" containerID="a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.979096 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.986671 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987089 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987104 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987124 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f24d064e-e0af-428b-a988-9850845b32e4" containerName="neutron-api" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987131 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f24d064e-e0af-428b-a988-9850845b32e4" containerName="neutron-api" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987144 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="cinder-scheduler" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987151 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="cinder-scheduler" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987162 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f24d064e-e0af-428b-a988-9850845b32e4" containerName="neutron-httpd" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987169 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f24d064e-e0af-428b-a988-9850845b32e4" containerName="neutron-httpd" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987180 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987186 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987210 4813 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="84915478-8abb-40dd-bb9a-4c623f742063" containerName="init" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987218 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="84915478-8abb-40dd-bb9a-4c623f742063" containerName="init" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987226 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84915478-8abb-40dd-bb9a-4c623f742063" containerName="dnsmasq-dns" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987232 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="84915478-8abb-40dd-bb9a-4c623f742063" containerName="dnsmasq-dns" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987240 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987246 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987261 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="probe" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987267 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="probe" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987437 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f24d064e-e0af-428b-a988-9850845b32e4" containerName="neutron-api" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987448 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987455 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="probe" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987471 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987479 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f24d064e-e0af-428b-a988-9850845b32e4" containerName="neutron-httpd" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987488 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="84915478-8abb-40dd-bb9a-4c623f742063" containerName="dnsmasq-dns" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987509 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" containerName="cinder-scheduler" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987518 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987529 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api-log" Oct 07 19:35:58 crc kubenswrapper[4813]: E1007 19:35:58.987681 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.987689 4813 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8705c88f-a04a-4861-9e64-05bf5e90237f" containerName="barbican-api" Oct 07 19:35:58 crc kubenswrapper[4813]: I1007 19:35:58.992275 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:58.996927 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:58.996998 4813 scope.go:117] "RemoveContainer" containerID="5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635" Oct 07 19:35:59 crc kubenswrapper[4813]: E1007 19:35:58.998004 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635\": container with ID starting with 5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635 not found: ID does not exist" containerID="5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:58.998027 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635"} err="failed to get container status \"5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635\": rpc error: code = NotFound desc = could not find container \"5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635\": container with ID starting with 5db2e12a38835dd2d6ac588008d4217063de63ca524bce5ab890fc540b794635 not found: ID does not exist" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:58.998045 4813 scope.go:117] "RemoveContainer" containerID="a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e" Oct 07 19:35:59 crc kubenswrapper[4813]: E1007 19:35:59.004127 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e\": container with ID starting with a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e not found: ID does not exist" containerID="a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.004165 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e"} err="failed to get container status \"a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e\": rpc error: code = NotFound desc = could not find container \"a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e\": container with ID starting with a94f52a10e542458ba669ecd3436870d4e36fffcc0da15e3b1f4ed0c55f53b8e not found: ID does not exist" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.005715 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.033455 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrzkd\" (UniqueName: \"kubernetes.io/projected/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-kube-api-access-hrzkd\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.033532 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-config-data\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.033581 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-scripts\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.033598 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.033628 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.033667 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.134972 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-scripts\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.135025 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.135070 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.135104 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.135205 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrzkd\" (UniqueName: 
\"kubernetes.io/projected/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-kube-api-access-hrzkd\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.135250 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-config-data\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.135650 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.138876 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.140802 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.142301 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-config-data\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.151792 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrzkd\" (UniqueName: \"kubernetes.io/projected/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-kube-api-access-hrzkd\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.153835 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bad7f43d-8146-46b1-a2d4-9c4a23cd4377-scripts\") pod \"cinder-scheduler-0\" (UID: \"bad7f43d-8146-46b1-a2d4-9c4a23cd4377\") " pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.321773 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.605289 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.606891 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.613149 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.613293 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.613420 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-kk2h4" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.635451 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.643217 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh72j\" (UniqueName: \"kubernetes.io/projected/00a18181-39b8-42bc-8cc9-4518c7a16137-kube-api-access-qh72j\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.643262 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a18181-39b8-42bc-8cc9-4518c7a16137-combined-ca-bundle\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.643452 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00a18181-39b8-42bc-8cc9-4518c7a16137-openstack-config-secret\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.643632 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00a18181-39b8-42bc-8cc9-4518c7a16137-openstack-config\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.745127 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00a18181-39b8-42bc-8cc9-4518c7a16137-openstack-config\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.745255 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh72j\" (UniqueName: \"kubernetes.io/projected/00a18181-39b8-42bc-8cc9-4518c7a16137-kube-api-access-qh72j\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.745295 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a18181-39b8-42bc-8cc9-4518c7a16137-combined-ca-bundle\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.745398 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00a18181-39b8-42bc-8cc9-4518c7a16137-openstack-config-secret\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.747525 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/00a18181-39b8-42bc-8cc9-4518c7a16137-openstack-config\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.754027 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a18181-39b8-42bc-8cc9-4518c7a16137-combined-ca-bundle\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.754041 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/00a18181-39b8-42bc-8cc9-4518c7a16137-openstack-config-secret\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.766933 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qh72j\" (UniqueName: \"kubernetes.io/projected/00a18181-39b8-42bc-8cc9-4518c7a16137-kube-api-access-qh72j\") pod \"openstackclient\" (UID: \"00a18181-39b8-42bc-8cc9-4518c7a16137\") " pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.811129 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.935887 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 07 19:35:59 crc kubenswrapper[4813]: I1007 19:35:59.948915 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bad7f43d-8146-46b1-a2d4-9c4a23cd4377","Type":"ContainerStarted","Data":"b4fe9c0abad71f1000282a81eba06c5dc58635001cb7a112d161679fc8155791"} Oct 07 19:36:00 crc kubenswrapper[4813]: I1007 19:36:00.478629 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 07 19:36:00 crc kubenswrapper[4813]: I1007 19:36:00.621269 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c069e7ae-aa85-4df1-bb38-66c5b45c3341" path="/var/lib/kubelet/pods/c069e7ae-aa85-4df1-bb38-66c5b45c3341/volumes" Oct 07 19:36:00 crc kubenswrapper[4813]: I1007 19:36:00.962196 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"00a18181-39b8-42bc-8cc9-4518c7a16137","Type":"ContainerStarted","Data":"8ef94f2e19ff7463b2e19dcf714a7fa7e2f96572579e207a5b80f70cb5b1ad68"} Oct 07 19:36:00 crc kubenswrapper[4813]: I1007 19:36:00.964632 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bad7f43d-8146-46b1-a2d4-9c4a23cd4377","Type":"ContainerStarted","Data":"7b3c64f40582b84e83f04d5adfc15bfce8543e07a2202c6f336a4c1300ab7e42"} Oct 07 19:36:01 crc kubenswrapper[4813]: I1007 19:36:01.979276 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bad7f43d-8146-46b1-a2d4-9c4a23cd4377","Type":"ContainerStarted","Data":"a6a2212804ea4c6dbb1b8f6805e55e1349c1ed5708a1f5654c9d417d419cc46a"} Oct 07 19:36:02 crc kubenswrapper[4813]: I1007 19:36:02.012074 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.012047728 podStartE2EDuration="4.012047728s" podCreationTimestamp="2025-10-07 19:35:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:36:01.997232448 +0000 UTC m=+1088.075488059" watchObservedRunningTime="2025-10-07 19:36:02.012047728 +0000 UTC m=+1088.090303339" Oct 07 19:36:03 crc kubenswrapper[4813]: I1007 19:36:03.095469 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.321933 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.801013 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-65d65664c-r46qm"] Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.802754 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.807002 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.807120 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.808116 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.835121 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-65d65664c-r46qm"] Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.866193 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-config-data\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.866250 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-log-httpd\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.866279 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgmmf\" (UniqueName: \"kubernetes.io/projected/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-kube-api-access-dgmmf\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.866340 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-combined-ca-bundle\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.866378 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-run-httpd\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.866445 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-etc-swift\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.866492 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-public-tls-certs\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " 
pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.866527 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-internal-tls-certs\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.967810 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-internal-tls-certs\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.967895 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-config-data\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.967924 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-log-httpd\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.967947 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgmmf\" (UniqueName: \"kubernetes.io/projected/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-kube-api-access-dgmmf\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.967984 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-combined-ca-bundle\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.968016 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-run-httpd\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.968043 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-etc-swift\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.968084 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-public-tls-certs\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 
crc kubenswrapper[4813]: I1007 19:36:04.969533 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-run-httpd\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.971653 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-log-httpd\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.974022 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-public-tls-certs\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.982327 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-etc-swift\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.982602 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-combined-ca-bundle\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.985360 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-config-data\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.986290 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-internal-tls-certs\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:04 crc kubenswrapper[4813]: I1007 19:36:04.988839 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgmmf\" (UniqueName: \"kubernetes.io/projected/d3c7d72e-ba30-402f-99f1-aff8e4c688ee-kube-api-access-dgmmf\") pod \"swift-proxy-65d65664c-r46qm\" (UID: \"d3c7d72e-ba30-402f-99f1-aff8e4c688ee\") " pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:05 crc kubenswrapper[4813]: I1007 19:36:05.122481 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:05 crc kubenswrapper[4813]: I1007 19:36:05.305570 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:05 crc kubenswrapper[4813]: I1007 19:36:05.306137 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="ceilometer-central-agent" containerID="cri-o://4c8ca7e84ef4af776b2ed6b725215519ec01466989e4fd6920807763e2e25677" gracePeriod=30 Oct 07 19:36:05 crc kubenswrapper[4813]: I1007 19:36:05.306449 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="proxy-httpd" containerID="cri-o://1f5d0532125d8516d14b2e500969f9472bd1973cc6fdc91b0a0e5be8a83b55cb" gracePeriod=30 Oct 07 19:36:05 crc kubenswrapper[4813]: I1007 19:36:05.306603 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="ceilometer-notification-agent" containerID="cri-o://5f107cf54261a1000aaa1a0f00be8f665ba33e407dc73f82e5077d08b233eb04" gracePeriod=30 Oct 07 19:36:05 crc kubenswrapper[4813]: I1007 19:36:05.306679 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="sg-core" containerID="cri-o://75dc9b33dbd27fd0d60b8d2fbe18fec358911426a4043de45f40e06cce24206c" gracePeriod=30 Oct 07 19:36:05 crc kubenswrapper[4813]: I1007 19:36:05.414944 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.168:3000/\": read tcp 10.217.0.2:47860->10.217.0.168:3000: read: connection reset by peer" Oct 07 19:36:05 crc kubenswrapper[4813]: I1007 19:36:05.768332 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-65d65664c-r46qm"] Oct 07 19:36:06 crc kubenswrapper[4813]: I1007 19:36:06.032247 4813 generic.go:334] "Generic (PLEG): container finished" podID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerID="1f5d0532125d8516d14b2e500969f9472bd1973cc6fdc91b0a0e5be8a83b55cb" exitCode=0 Oct 07 19:36:06 crc kubenswrapper[4813]: I1007 19:36:06.032285 4813 generic.go:334] "Generic (PLEG): container finished" podID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerID="75dc9b33dbd27fd0d60b8d2fbe18fec358911426a4043de45f40e06cce24206c" exitCode=2 Oct 07 19:36:06 crc kubenswrapper[4813]: I1007 19:36:06.032295 4813 generic.go:334] "Generic (PLEG): container finished" podID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerID="4c8ca7e84ef4af776b2ed6b725215519ec01466989e4fd6920807763e2e25677" exitCode=0 Oct 07 19:36:06 crc kubenswrapper[4813]: I1007 19:36:06.032480 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerDied","Data":"1f5d0532125d8516d14b2e500969f9472bd1973cc6fdc91b0a0e5be8a83b55cb"} Oct 07 19:36:06 crc kubenswrapper[4813]: I1007 19:36:06.032516 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerDied","Data":"75dc9b33dbd27fd0d60b8d2fbe18fec358911426a4043de45f40e06cce24206c"} Oct 07 19:36:06 crc kubenswrapper[4813]: I1007 
19:36:06.032530 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerDied","Data":"4c8ca7e84ef4af776b2ed6b725215519ec01466989e4fd6920807763e2e25677"} Oct 07 19:36:06 crc kubenswrapper[4813]: I1007 19:36:06.035240 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65d65664c-r46qm" event={"ID":"d3c7d72e-ba30-402f-99f1-aff8e4c688ee","Type":"ContainerStarted","Data":"9a1b305b119d0f5cbc43fd19112445827b2a6153a5cf1297a63663ec80a00fcd"} Oct 07 19:36:07 crc kubenswrapper[4813]: I1007 19:36:07.046766 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65d65664c-r46qm" event={"ID":"d3c7d72e-ba30-402f-99f1-aff8e4c688ee","Type":"ContainerStarted","Data":"c0de8e643ade631adc506afdc334bdf4206560b62c1a817737b5a6e8ab6b7f92"} Oct 07 19:36:09 crc kubenswrapper[4813]: I1007 19:36:09.067661 4813 generic.go:334] "Generic (PLEG): container finished" podID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerID="5f107cf54261a1000aaa1a0f00be8f665ba33e407dc73f82e5077d08b233eb04" exitCode=0 Oct 07 19:36:09 crc kubenswrapper[4813]: I1007 19:36:09.067860 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerDied","Data":"5f107cf54261a1000aaa1a0f00be8f665ba33e407dc73f82e5077d08b233eb04"} Oct 07 19:36:09 crc kubenswrapper[4813]: I1007 19:36:09.787804 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 07 19:36:10 crc kubenswrapper[4813]: E1007 19:36:10.058313 4813 manager.go:1116] Failed to create existing container: /kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8705c88f_a04a_4861_9e64_05bf5e90237f.slice/crio-6b310ecc7fcccc8f97d11990a2e9c0b56f24952a220abb5123648589ce9bf2b7: Error finding container 6b310ecc7fcccc8f97d11990a2e9c0b56f24952a220abb5123648589ce9bf2b7: Status 404 returned error can't find the container with id 6b310ecc7fcccc8f97d11990a2e9c0b56f24952a220abb5123648589ce9bf2b7 Oct 07 19:36:10 crc kubenswrapper[4813]: I1007 19:36:10.084791 4813 generic.go:334] "Generic (PLEG): container finished" podID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerID="6ae7441f8930b87f906e801dc55dce71ed5b180dbc9b0bace9037674d73cac68" exitCode=137 Oct 07 19:36:10 crc kubenswrapper[4813]: I1007 19:36:10.084831 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688984b46d-g79nd" event={"ID":"aed6b0b2-d265-4f3f-a68b-215696e44617","Type":"ContainerDied","Data":"6ae7441f8930b87f906e801dc55dce71ed5b180dbc9b0bace9037674d73cac68"} Oct 07 19:36:10 crc kubenswrapper[4813]: E1007 19:36:10.267774 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3477fb6d_49bd_4728_832d_4ffd556c2866.slice/crio-4c8ca7e84ef4af776b2ed6b725215519ec01466989e4fd6920807763e2e25677.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3477fb6d_49bd_4728_832d_4ffd556c2866.slice/crio-1f5d0532125d8516d14b2e500969f9472bd1973cc6fdc91b0a0e5be8a83b55cb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3477fb6d_49bd_4728_832d_4ffd556c2866.slice/crio-conmon-75dc9b33dbd27fd0d60b8d2fbe18fec358911426a4043de45f40e06cce24206c.scope\": RecentStats: unable 
to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3477fb6d_49bd_4728_832d_4ffd556c2866.slice/crio-conmon-4c8ca7e84ef4af776b2ed6b725215519ec01466989e4fd6920807763e2e25677.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3477fb6d_49bd_4728_832d_4ffd556c2866.slice/crio-5f107cf54261a1000aaa1a0f00be8f665ba33e407dc73f82e5077d08b233eb04.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaed6b0b2_d265_4f3f_a68b_215696e44617.slice/crio-6ae7441f8930b87f906e801dc55dce71ed5b180dbc9b0bace9037674d73cac68.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8705c88f_a04a_4861_9e64_05bf5e90237f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3477fb6d_49bd_4728_832d_4ffd556c2866.slice/crio-conmon-1f5d0532125d8516d14b2e500969f9472bd1973cc6fdc91b0a0e5be8a83b55cb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3477fb6d_49bd_4728_832d_4ffd556c2866.slice/crio-75dc9b33dbd27fd0d60b8d2fbe18fec358911426a4043de45f40e06cce24206c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaed6b0b2_d265_4f3f_a68b_215696e44617.slice/crio-conmon-6ae7441f8930b87f906e801dc55dce71ed5b180dbc9b0bace9037674d73cac68.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0b0d403_9a0c_407b_a3d4_a0db3e612092.slice/crio-conmon-09fc8d6b8f8db537f189bf0bc2613bb56d6dafb56ae7e6bce96d93213c922ce6.scope\": RecentStats: unable to find data in memory cache]" Oct 07 19:36:11 crc kubenswrapper[4813]: I1007 19:36:11.103034 4813 generic.go:334] "Generic (PLEG): container finished" podID="a0b0d403-9a0c-407b-a3d4-a0db3e612092" containerID="09fc8d6b8f8db537f189bf0bc2613bb56d6dafb56ae7e6bce96d93213c922ce6" exitCode=137 Oct 07 19:36:11 crc kubenswrapper[4813]: I1007 19:36:11.103301 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-687ddb5b-lwwn2" event={"ID":"a0b0d403-9a0c-407b-a3d4-a0db3e612092","Type":"ContainerDied","Data":"09fc8d6b8f8db537f189bf0bc2613bb56d6dafb56ae7e6bce96d93213c922ce6"} Oct 07 19:36:12 crc kubenswrapper[4813]: I1007 19:36:12.984678 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.080315 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-scripts\") pod \"3477fb6d-49bd-4728-832d-4ffd556c2866\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.080474 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-sg-core-conf-yaml\") pod \"3477fb6d-49bd-4728-832d-4ffd556c2866\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.080533 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs44v\" (UniqueName: \"kubernetes.io/projected/3477fb6d-49bd-4728-832d-4ffd556c2866-kube-api-access-gs44v\") pod \"3477fb6d-49bd-4728-832d-4ffd556c2866\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.080580 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-run-httpd\") pod \"3477fb6d-49bd-4728-832d-4ffd556c2866\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.080611 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-config-data\") pod \"3477fb6d-49bd-4728-832d-4ffd556c2866\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.080643 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-combined-ca-bundle\") pod \"3477fb6d-49bd-4728-832d-4ffd556c2866\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.080675 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-log-httpd\") pod \"3477fb6d-49bd-4728-832d-4ffd556c2866\" (UID: \"3477fb6d-49bd-4728-832d-4ffd556c2866\") " Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.081415 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3477fb6d-49bd-4728-832d-4ffd556c2866" (UID: "3477fb6d-49bd-4728-832d-4ffd556c2866"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.081556 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3477fb6d-49bd-4728-832d-4ffd556c2866" (UID: "3477fb6d-49bd-4728-832d-4ffd556c2866"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.086547 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3477fb6d-49bd-4728-832d-4ffd556c2866-kube-api-access-gs44v" (OuterVolumeSpecName: "kube-api-access-gs44v") pod "3477fb6d-49bd-4728-832d-4ffd556c2866" (UID: "3477fb6d-49bd-4728-832d-4ffd556c2866"). InnerVolumeSpecName "kube-api-access-gs44v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.102026 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-scripts" (OuterVolumeSpecName: "scripts") pod "3477fb6d-49bd-4728-832d-4ffd556c2866" (UID: "3477fb6d-49bd-4728-832d-4ffd556c2866"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.157137 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3477fb6d-49bd-4728-832d-4ffd556c2866" (UID: "3477fb6d-49bd-4728-832d-4ffd556c2866"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.158838 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"00a18181-39b8-42bc-8cc9-4518c7a16137","Type":"ContainerStarted","Data":"20aedb3ecae6dca24fdc19973cc5f44920394e02053be90861c0cd2e58166a6d"} Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.174974 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688984b46d-g79nd" event={"ID":"aed6b0b2-d265-4f3f-a68b-215696e44617","Type":"ContainerStarted","Data":"f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86"} Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.177910 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.035650355 podStartE2EDuration="14.177877454s" podCreationTimestamp="2025-10-07 19:35:59 +0000 UTC" firstStartedPulling="2025-10-07 19:36:00.532839023 +0000 UTC m=+1086.611094634" lastFinishedPulling="2025-10-07 19:36:12.675066122 +0000 UTC m=+1098.753321733" observedRunningTime="2025-10-07 19:36:13.177648677 +0000 UTC m=+1099.255904288" watchObservedRunningTime="2025-10-07 19:36:13.177877454 +0000 UTC m=+1099.256133065" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.185011 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.185039 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs44v\" (UniqueName: \"kubernetes.io/projected/3477fb6d-49bd-4728-832d-4ffd556c2866-kube-api-access-gs44v\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.185050 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.185057 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/3477fb6d-49bd-4728-832d-4ffd556c2866-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.185065 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.185666 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-687ddb5b-lwwn2" event={"ID":"a0b0d403-9a0c-407b-a3d4-a0db3e612092","Type":"ContainerStarted","Data":"4fd56f6923eb6274c1fc6ddc748221b750f344af2139fb97638b5de07d27c380"} Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.193560 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-65d65664c-r46qm" event={"ID":"d3c7d72e-ba30-402f-99f1-aff8e4c688ee","Type":"ContainerStarted","Data":"42fb8dde720ed77aec66ac1764ac8874faee22c1b5ce732511c7492150a36fd9"} Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.194073 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.194179 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.204365 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3477fb6d-49bd-4728-832d-4ffd556c2866","Type":"ContainerDied","Data":"e6d87742d0d48017d14afb056713a62f021f110f31796247d25fa5ba57d51893"} Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.204415 4813 scope.go:117] "RemoveContainer" containerID="1f5d0532125d8516d14b2e500969f9472bd1973cc6fdc91b0a0e5be8a83b55cb" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.204420 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.216514 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3477fb6d-49bd-4728-832d-4ffd556c2866" (UID: "3477fb6d-49bd-4728-832d-4ffd556c2866"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.225624 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-65d65664c-r46qm" podUID="d3c7d72e-ba30-402f-99f1-aff8e4c688ee" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.261772 4813 scope.go:117] "RemoveContainer" containerID="75dc9b33dbd27fd0d60b8d2fbe18fec358911426a4043de45f40e06cce24206c" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.262525 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-65d65664c-r46qm" podStartSLOduration=9.262500551 podStartE2EDuration="9.262500551s" podCreationTimestamp="2025-10-07 19:36:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:36:13.262143071 +0000 UTC m=+1099.340398682" watchObservedRunningTime="2025-10-07 19:36:13.262500551 +0000 UTC m=+1099.340756162" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.276702 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-config-data" (OuterVolumeSpecName: "config-data") pod "3477fb6d-49bd-4728-832d-4ffd556c2866" (UID: "3477fb6d-49bd-4728-832d-4ffd556c2866"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.287247 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.287277 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3477fb6d-49bd-4728-832d-4ffd556c2866-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.295137 4813 scope.go:117] "RemoveContainer" containerID="5f107cf54261a1000aaa1a0f00be8f665ba33e407dc73f82e5077d08b233eb04" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.314743 4813 scope.go:117] "RemoveContainer" containerID="4c8ca7e84ef4af776b2ed6b725215519ec01466989e4fd6920807763e2e25677" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.536826 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.545583 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.568415 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:13 crc kubenswrapper[4813]: E1007 19:36:13.568879 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="ceilometer-central-agent" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.568902 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="ceilometer-central-agent" Oct 07 19:36:13 crc kubenswrapper[4813]: E1007 19:36:13.568939 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="ceilometer-notification-agent" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 
19:36:13.568950 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="ceilometer-notification-agent" Oct 07 19:36:13 crc kubenswrapper[4813]: E1007 19:36:13.568971 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="sg-core" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.568979 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="sg-core" Oct 07 19:36:13 crc kubenswrapper[4813]: E1007 19:36:13.568988 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="proxy-httpd" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.568995 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="proxy-httpd" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.569220 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="ceilometer-notification-agent" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.569244 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="proxy-httpd" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.569269 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="sg-core" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.569285 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" containerName="ceilometer-central-agent" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.571711 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.574300 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.575034 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.605904 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.694553 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-run-httpd\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.694638 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.694669 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-scripts\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.694753 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjg2l\" (UniqueName: \"kubernetes.io/projected/def784b9-3422-478d-8966-f6fe5aaa3b63-kube-api-access-cjg2l\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.694833 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-config-data\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.694864 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.694942 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-log-httpd\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.796311 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-run-httpd\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.796608 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.796690 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-scripts\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.796792 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjg2l\" (UniqueName: \"kubernetes.io/projected/def784b9-3422-478d-8966-f6fe5aaa3b63-kube-api-access-cjg2l\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.796927 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-config-data\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.797250 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.796964 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-run-httpd\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.797486 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-log-httpd\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.797898 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-log-httpd\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.801426 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.802533 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-config-data\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.803019 4813 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.814636 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjg2l\" (UniqueName: \"kubernetes.io/projected/def784b9-3422-478d-8966-f6fe5aaa3b63-kube-api-access-cjg2l\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:13 crc kubenswrapper[4813]: I1007 19:36:13.815641 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-scripts\") pod \"ceilometer-0\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " pod="openstack/ceilometer-0" Oct 07 19:36:14 crc kubenswrapper[4813]: I1007 19:36:14.025070 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:36:14 crc kubenswrapper[4813]: I1007 19:36:14.131062 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-65d65664c-r46qm" podUID="d3c7d72e-ba30-402f-99f1-aff8e4c688ee" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 07 19:36:14 crc kubenswrapper[4813]: I1007 19:36:14.258621 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-65d65664c-r46qm" podUID="d3c7d72e-ba30-402f-99f1-aff8e4c688ee" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 07 19:36:14 crc kubenswrapper[4813]: I1007 19:36:14.323087 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:14 crc kubenswrapper[4813]: I1007 19:36:14.482337 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:14 crc kubenswrapper[4813]: I1007 19:36:14.620310 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3477fb6d-49bd-4728-832d-4ffd556c2866" path="/var/lib/kubelet/pods/3477fb6d-49bd-4728-832d-4ffd556c2866/volumes" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.050136 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-hln5n"] Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.051722 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hln5n" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.086461 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-hln5n"] Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.160299 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zv8w\" (UniqueName: \"kubernetes.io/projected/f60ece8a-8926-46ef-9ae9-126856f4b1b5-kube-api-access-7zv8w\") pod \"nova-api-db-create-hln5n\" (UID: \"f60ece8a-8926-46ef-9ae9-126856f4b1b5\") " pod="openstack/nova-api-db-create-hln5n" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.246616 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-5qmtk"] Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.248229 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-5qmtk" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.253008 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerStarted","Data":"d58b2604f5ab29704bdd92eb02723233b134369a738e96165957f066a799c6db"} Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.254713 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-5qmtk"] Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.289720 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rthb2\" (UniqueName: \"kubernetes.io/projected/c803f2bd-9bf8-43fa-af9a-3a61bdac0972-kube-api-access-rthb2\") pod \"nova-cell0-db-create-5qmtk\" (UID: \"c803f2bd-9bf8-43fa-af9a-3a61bdac0972\") " pod="openstack/nova-cell0-db-create-5qmtk" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.289755 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zv8w\" (UniqueName: \"kubernetes.io/projected/f60ece8a-8926-46ef-9ae9-126856f4b1b5-kube-api-access-7zv8w\") pod \"nova-api-db-create-hln5n\" (UID: \"f60ece8a-8926-46ef-9ae9-126856f4b1b5\") " pod="openstack/nova-api-db-create-hln5n" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.356562 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zv8w\" (UniqueName: \"kubernetes.io/projected/f60ece8a-8926-46ef-9ae9-126856f4b1b5-kube-api-access-7zv8w\") pod \"nova-api-db-create-hln5n\" (UID: \"f60ece8a-8926-46ef-9ae9-126856f4b1b5\") " pod="openstack/nova-api-db-create-hln5n" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.374407 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-gwx5z"] Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.382185 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gwx5z" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.384886 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.391938 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rthb2\" (UniqueName: \"kubernetes.io/projected/c803f2bd-9bf8-43fa-af9a-3a61bdac0972-kube-api-access-rthb2\") pod \"nova-cell0-db-create-5qmtk\" (UID: \"c803f2bd-9bf8-43fa-af9a-3a61bdac0972\") " pod="openstack/nova-cell0-db-create-5qmtk" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.404818 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-hln5n" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.429376 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-gwx5z"] Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.446073 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rthb2\" (UniqueName: \"kubernetes.io/projected/c803f2bd-9bf8-43fa-af9a-3a61bdac0972-kube-api-access-rthb2\") pod \"nova-cell0-db-create-5qmtk\" (UID: \"c803f2bd-9bf8-43fa-af9a-3a61bdac0972\") " pod="openstack/nova-cell0-db-create-5qmtk" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.493686 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pwsf\" (UniqueName: \"kubernetes.io/projected/b0969e89-a3e6-4d6d-80f3-381d112f949d-kube-api-access-7pwsf\") pod \"nova-cell1-db-create-gwx5z\" (UID: \"b0969e89-a3e6-4d6d-80f3-381d112f949d\") " pod="openstack/nova-cell1-db-create-gwx5z" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.580476 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5qmtk" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.595023 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pwsf\" (UniqueName: \"kubernetes.io/projected/b0969e89-a3e6-4d6d-80f3-381d112f949d-kube-api-access-7pwsf\") pod \"nova-cell1-db-create-gwx5z\" (UID: \"b0969e89-a3e6-4d6d-80f3-381d112f949d\") " pod="openstack/nova-cell1-db-create-gwx5z" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.623279 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pwsf\" (UniqueName: \"kubernetes.io/projected/b0969e89-a3e6-4d6d-80f3-381d112f949d-kube-api-access-7pwsf\") pod \"nova-cell1-db-create-gwx5z\" (UID: \"b0969e89-a3e6-4d6d-80f3-381d112f949d\") " pod="openstack/nova-cell1-db-create-gwx5z" Oct 07 19:36:15 crc kubenswrapper[4813]: I1007 19:36:15.754019 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-gwx5z" Oct 07 19:36:16 crc kubenswrapper[4813]: I1007 19:36:16.114854 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-hln5n"] Oct 07 19:36:16 crc kubenswrapper[4813]: I1007 19:36:16.272773 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerStarted","Data":"eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86"} Oct 07 19:36:16 crc kubenswrapper[4813]: I1007 19:36:16.274883 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hln5n" event={"ID":"f60ece8a-8926-46ef-9ae9-126856f4b1b5","Type":"ContainerStarted","Data":"622b2dfdb8f4e6824aaad32391b576c5592d0de3809187f5dbf5cd15211c3064"} Oct 07 19:36:16 crc kubenswrapper[4813]: I1007 19:36:16.319990 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-5qmtk"] Oct 07 19:36:16 crc kubenswrapper[4813]: W1007 19:36:16.333516 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc803f2bd_9bf8_43fa_af9a_3a61bdac0972.slice/crio-49eab090ef1731271e82aa088771b2129aa81f6a887cb841540fae58fbbb9041 WatchSource:0}: Error finding container 49eab090ef1731271e82aa088771b2129aa81f6a887cb841540fae58fbbb9041: Status 404 returned error can't find the container with id 49eab090ef1731271e82aa088771b2129aa81f6a887cb841540fae58fbbb9041 Oct 07 19:36:16 crc kubenswrapper[4813]: I1007 19:36:16.429078 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-gwx5z"] Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.284464 4813 generic.go:334] "Generic (PLEG): container finished" podID="b0969e89-a3e6-4d6d-80f3-381d112f949d" containerID="32dce849bc269113a34ace501a116e4331ac899be38f14dd1d052795ca870f1a" exitCode=0 Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.284605 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gwx5z" event={"ID":"b0969e89-a3e6-4d6d-80f3-381d112f949d","Type":"ContainerDied","Data":"32dce849bc269113a34ace501a116e4331ac899be38f14dd1d052795ca870f1a"} Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.284908 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gwx5z" event={"ID":"b0969e89-a3e6-4d6d-80f3-381d112f949d","Type":"ContainerStarted","Data":"78958b835dc97703dfd21d3eb9a21437b3bdbb4de28979e070889b6eb2e07b7c"} Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.287339 4813 generic.go:334] "Generic (PLEG): container finished" podID="f60ece8a-8926-46ef-9ae9-126856f4b1b5" containerID="62d244cc4322b3e5f4fa64a6748772dabb9b28797e5ef17a7c08291f4321275d" exitCode=0 Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.287463 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hln5n" event={"ID":"f60ece8a-8926-46ef-9ae9-126856f4b1b5","Type":"ContainerDied","Data":"62d244cc4322b3e5f4fa64a6748772dabb9b28797e5ef17a7c08291f4321275d"} Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.289228 4813 generic.go:334] "Generic (PLEG): container finished" podID="c803f2bd-9bf8-43fa-af9a-3a61bdac0972" containerID="4b1061e097dab9aabc6c3919f0168d342dcf30b60160f9409e6055aeb332f1df" exitCode=0 Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.289349 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-db-create-5qmtk" event={"ID":"c803f2bd-9bf8-43fa-af9a-3a61bdac0972","Type":"ContainerDied","Data":"4b1061e097dab9aabc6c3919f0168d342dcf30b60160f9409e6055aeb332f1df"} Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.289439 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5qmtk" event={"ID":"c803f2bd-9bf8-43fa-af9a-3a61bdac0972","Type":"ContainerStarted","Data":"49eab090ef1731271e82aa088771b2129aa81f6a887cb841540fae58fbbb9041"} Oct 07 19:36:17 crc kubenswrapper[4813]: I1007 19:36:17.291221 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerStarted","Data":"0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662"} Oct 07 19:36:18 crc kubenswrapper[4813]: I1007 19:36:18.300578 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerStarted","Data":"e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37"} Oct 07 19:36:18 crc kubenswrapper[4813]: I1007 19:36:18.872749 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-gwx5z" Oct 07 19:36:18 crc kubenswrapper[4813]: I1007 19:36:18.881720 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5qmtk" Oct 07 19:36:18 crc kubenswrapper[4813]: I1007 19:36:18.889189 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hln5n" Oct 07 19:36:18 crc kubenswrapper[4813]: I1007 19:36:18.989371 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zv8w\" (UniqueName: \"kubernetes.io/projected/f60ece8a-8926-46ef-9ae9-126856f4b1b5-kube-api-access-7zv8w\") pod \"f60ece8a-8926-46ef-9ae9-126856f4b1b5\" (UID: \"f60ece8a-8926-46ef-9ae9-126856f4b1b5\") " Oct 07 19:36:18 crc kubenswrapper[4813]: I1007 19:36:18.989772 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rthb2\" (UniqueName: \"kubernetes.io/projected/c803f2bd-9bf8-43fa-af9a-3a61bdac0972-kube-api-access-rthb2\") pod \"c803f2bd-9bf8-43fa-af9a-3a61bdac0972\" (UID: \"c803f2bd-9bf8-43fa-af9a-3a61bdac0972\") " Oct 07 19:36:18 crc kubenswrapper[4813]: I1007 19:36:18.989982 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pwsf\" (UniqueName: \"kubernetes.io/projected/b0969e89-a3e6-4d6d-80f3-381d112f949d-kube-api-access-7pwsf\") pod \"b0969e89-a3e6-4d6d-80f3-381d112f949d\" (UID: \"b0969e89-a3e6-4d6d-80f3-381d112f949d\") " Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.000480 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0969e89-a3e6-4d6d-80f3-381d112f949d-kube-api-access-7pwsf" (OuterVolumeSpecName: "kube-api-access-7pwsf") pod "b0969e89-a3e6-4d6d-80f3-381d112f949d" (UID: "b0969e89-a3e6-4d6d-80f3-381d112f949d"). InnerVolumeSpecName "kube-api-access-7pwsf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.002082 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c803f2bd-9bf8-43fa-af9a-3a61bdac0972-kube-api-access-rthb2" (OuterVolumeSpecName: "kube-api-access-rthb2") pod "c803f2bd-9bf8-43fa-af9a-3a61bdac0972" (UID: "c803f2bd-9bf8-43fa-af9a-3a61bdac0972"). InnerVolumeSpecName "kube-api-access-rthb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.020935 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f60ece8a-8926-46ef-9ae9-126856f4b1b5-kube-api-access-7zv8w" (OuterVolumeSpecName: "kube-api-access-7zv8w") pod "f60ece8a-8926-46ef-9ae9-126856f4b1b5" (UID: "f60ece8a-8926-46ef-9ae9-126856f4b1b5"). InnerVolumeSpecName "kube-api-access-7zv8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.092312 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pwsf\" (UniqueName: \"kubernetes.io/projected/b0969e89-a3e6-4d6d-80f3-381d112f949d-kube-api-access-7pwsf\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.092412 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zv8w\" (UniqueName: \"kubernetes.io/projected/f60ece8a-8926-46ef-9ae9-126856f4b1b5-kube-api-access-7zv8w\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.092422 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rthb2\" (UniqueName: \"kubernetes.io/projected/c803f2bd-9bf8-43fa-af9a-3a61bdac0972-kube-api-access-rthb2\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.309145 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5qmtk" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.309147 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5qmtk" event={"ID":"c803f2bd-9bf8-43fa-af9a-3a61bdac0972","Type":"ContainerDied","Data":"49eab090ef1731271e82aa088771b2129aa81f6a887cb841540fae58fbbb9041"} Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.310297 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49eab090ef1731271e82aa088771b2129aa81f6a887cb841540fae58fbbb9041" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.311986 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-gwx5z" event={"ID":"b0969e89-a3e6-4d6d-80f3-381d112f949d","Type":"ContainerDied","Data":"78958b835dc97703dfd21d3eb9a21437b3bdbb4de28979e070889b6eb2e07b7c"} Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.312071 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78958b835dc97703dfd21d3eb9a21437b3bdbb4de28979e070889b6eb2e07b7c" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.312177 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-gwx5z" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.313534 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-hln5n" event={"ID":"f60ece8a-8926-46ef-9ae9-126856f4b1b5","Type":"ContainerDied","Data":"622b2dfdb8f4e6824aaad32391b576c5592d0de3809187f5dbf5cd15211c3064"} Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.313567 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="622b2dfdb8f4e6824aaad32391b576c5592d0de3809187f5dbf5cd15211c3064" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.313613 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-hln5n" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.986490 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:36:19 crc kubenswrapper[4813]: I1007 19:36:19.988504 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.122479 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.122617 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.138879 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-65d65664c-r46qm" Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.331620 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerStarted","Data":"57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597"} Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.331659 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="ceilometer-central-agent" containerID="cri-o://eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86" gracePeriod=30 Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.331771 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="proxy-httpd" containerID="cri-o://57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597" gracePeriod=30 Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.331824 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="sg-core" containerID="cri-o://e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37" gracePeriod=30 Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.331872 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="ceilometer-notification-agent" containerID="cri-o://0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662" gracePeriod=30 Oct 07 19:36:20 crc kubenswrapper[4813]: I1007 19:36:20.331700 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 19:36:20 crc 
kubenswrapper[4813]: E1007 19:36:20.519043 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8705c88f_a04a_4861_9e64_05bf5e90237f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddef784b9_3422_478d_8966_f6fe5aaa3b63.slice/crio-conmon-e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37.scope\": RecentStats: unable to find data in memory cache]" Oct 07 19:36:21 crc kubenswrapper[4813]: I1007 19:36:21.341985 4813 generic.go:334] "Generic (PLEG): container finished" podID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerID="e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37" exitCode=2 Oct 07 19:36:21 crc kubenswrapper[4813]: I1007 19:36:21.342204 4813 generic.go:334] "Generic (PLEG): container finished" podID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerID="0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662" exitCode=0 Oct 07 19:36:21 crc kubenswrapper[4813]: I1007 19:36:21.342059 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerDied","Data":"e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37"} Oct 07 19:36:21 crc kubenswrapper[4813]: I1007 19:36:21.342241 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerDied","Data":"0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662"} Oct 07 19:36:22 crc kubenswrapper[4813]: I1007 19:36:22.079073 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:36:22 crc kubenswrapper[4813]: I1007 19:36:22.079134 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.157439 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=7.078363062 podStartE2EDuration="12.157418129s" podCreationTimestamp="2025-10-07 19:36:13 +0000 UTC" firstStartedPulling="2025-10-07 19:36:14.337888061 +0000 UTC m=+1100.416143672" lastFinishedPulling="2025-10-07 19:36:19.416943128 +0000 UTC m=+1105.495198739" observedRunningTime="2025-10-07 19:36:20.360637199 +0000 UTC m=+1106.438892810" watchObservedRunningTime="2025-10-07 19:36:25.157418129 +0000 UTC m=+1111.235673740" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.171570 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-a0b6-account-create-zz2gl"] Oct 07 19:36:25 crc kubenswrapper[4813]: E1007 19:36:25.171942 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c803f2bd-9bf8-43fa-af9a-3a61bdac0972" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.171953 4813 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c803f2bd-9bf8-43fa-af9a-3a61bdac0972" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: E1007 19:36:25.171965 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0969e89-a3e6-4d6d-80f3-381d112f949d" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.171971 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0969e89-a3e6-4d6d-80f3-381d112f949d" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: E1007 19:36:25.171988 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f60ece8a-8926-46ef-9ae9-126856f4b1b5" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.171994 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f60ece8a-8926-46ef-9ae9-126856f4b1b5" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.172206 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0969e89-a3e6-4d6d-80f3-381d112f949d" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.172231 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c803f2bd-9bf8-43fa-af9a-3a61bdac0972" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.172238 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f60ece8a-8926-46ef-9ae9-126856f4b1b5" containerName="mariadb-database-create" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.172863 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a0b6-account-create-zz2gl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.176793 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.179963 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a0b6-account-create-zz2gl"] Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.261824 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlkpf\" (UniqueName: \"kubernetes.io/projected/311a453f-d885-474f-a1e1-2c892940fdb0-kube-api-access-vlkpf\") pod \"nova-api-a0b6-account-create-zz2gl\" (UID: \"311a453f-d885-474f-a1e1-2c892940fdb0\") " pod="openstack/nova-api-a0b6-account-create-zz2gl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.358429 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-1254-account-create-m5qfr"] Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.363800 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlkpf\" (UniqueName: \"kubernetes.io/projected/311a453f-d885-474f-a1e1-2c892940fdb0-kube-api-access-vlkpf\") pod \"nova-api-a0b6-account-create-zz2gl\" (UID: \"311a453f-d885-474f-a1e1-2c892940fdb0\") " pod="openstack/nova-api-a0b6-account-create-zz2gl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.367778 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-1254-account-create-m5qfr" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.373625 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1254-account-create-m5qfr"] Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.377075 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.395946 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlkpf\" (UniqueName: \"kubernetes.io/projected/311a453f-d885-474f-a1e1-2c892940fdb0-kube-api-access-vlkpf\") pod \"nova-api-a0b6-account-create-zz2gl\" (UID: \"311a453f-d885-474f-a1e1-2c892940fdb0\") " pod="openstack/nova-api-a0b6-account-create-zz2gl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.465197 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8tql\" (UniqueName: \"kubernetes.io/projected/c0eed1e1-e367-4fec-a708-c20a5c871719-kube-api-access-b8tql\") pod \"nova-cell0-1254-account-create-m5qfr\" (UID: \"c0eed1e1-e367-4fec-a708-c20a5c871719\") " pod="openstack/nova-cell0-1254-account-create-m5qfr" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.525795 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a0b6-account-create-zz2gl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.549307 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4d0c-account-create-gkbfl"] Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.550464 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4d0c-account-create-gkbfl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.552729 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.570610 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8tql\" (UniqueName: \"kubernetes.io/projected/c0eed1e1-e367-4fec-a708-c20a5c871719-kube-api-access-b8tql\") pod \"nova-cell0-1254-account-create-m5qfr\" (UID: \"c0eed1e1-e367-4fec-a708-c20a5c871719\") " pod="openstack/nova-cell0-1254-account-create-m5qfr" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.580386 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4d0c-account-create-gkbfl"] Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.599025 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8tql\" (UniqueName: \"kubernetes.io/projected/c0eed1e1-e367-4fec-a708-c20a5c871719-kube-api-access-b8tql\") pod \"nova-cell0-1254-account-create-m5qfr\" (UID: \"c0eed1e1-e367-4fec-a708-c20a5c871719\") " pod="openstack/nova-cell0-1254-account-create-m5qfr" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.686044 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5xj8\" (UniqueName: \"kubernetes.io/projected/f203d9f1-8903-4490-bb6e-fe1e9d9988e4-kube-api-access-m5xj8\") pod \"nova-cell1-4d0c-account-create-gkbfl\" (UID: \"f203d9f1-8903-4490-bb6e-fe1e9d9988e4\") " pod="openstack/nova-cell1-4d0c-account-create-gkbfl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.695888 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-1254-account-create-m5qfr" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.787503 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5xj8\" (UniqueName: \"kubernetes.io/projected/f203d9f1-8903-4490-bb6e-fe1e9d9988e4-kube-api-access-m5xj8\") pod \"nova-cell1-4d0c-account-create-gkbfl\" (UID: \"f203d9f1-8903-4490-bb6e-fe1e9d9988e4\") " pod="openstack/nova-cell1-4d0c-account-create-gkbfl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.805441 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5xj8\" (UniqueName: \"kubernetes.io/projected/f203d9f1-8903-4490-bb6e-fe1e9d9988e4-kube-api-access-m5xj8\") pod \"nova-cell1-4d0c-account-create-gkbfl\" (UID: \"f203d9f1-8903-4490-bb6e-fe1e9d9988e4\") " pod="openstack/nova-cell1-4d0c-account-create-gkbfl" Oct 07 19:36:25 crc kubenswrapper[4813]: I1007 19:36:25.974628 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4d0c-account-create-gkbfl" Oct 07 19:36:26 crc kubenswrapper[4813]: I1007 19:36:26.059828 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a0b6-account-create-zz2gl"] Oct 07 19:36:26 crc kubenswrapper[4813]: W1007 19:36:26.070618 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod311a453f_d885_474f_a1e1_2c892940fdb0.slice/crio-2c13a250bd10b686e10bf180e165b1cdb113f71e6c72d591e7323ca1ba106a06 WatchSource:0}: Error finding container 2c13a250bd10b686e10bf180e165b1cdb113f71e6c72d591e7323ca1ba106a06: Status 404 returned error can't find the container with id 2c13a250bd10b686e10bf180e165b1cdb113f71e6c72d591e7323ca1ba106a06 Oct 07 19:36:26 crc kubenswrapper[4813]: I1007 19:36:26.316099 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1254-account-create-m5qfr"] Oct 07 19:36:26 crc kubenswrapper[4813]: W1007 19:36:26.331845 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0eed1e1_e367_4fec_a708_c20a5c871719.slice/crio-d9507f615c543f1125dcdb8661621168cc22f0e951844924ba29ddccb60e67b9 WatchSource:0}: Error finding container d9507f615c543f1125dcdb8661621168cc22f0e951844924ba29ddccb60e67b9: Status 404 returned error can't find the container with id d9507f615c543f1125dcdb8661621168cc22f0e951844924ba29ddccb60e67b9 Oct 07 19:36:26 crc kubenswrapper[4813]: I1007 19:36:26.416979 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1254-account-create-m5qfr" event={"ID":"c0eed1e1-e367-4fec-a708-c20a5c871719","Type":"ContainerStarted","Data":"d9507f615c543f1125dcdb8661621168cc22f0e951844924ba29ddccb60e67b9"} Oct 07 19:36:26 crc kubenswrapper[4813]: I1007 19:36:26.420721 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a0b6-account-create-zz2gl" event={"ID":"311a453f-d885-474f-a1e1-2c892940fdb0","Type":"ContainerStarted","Data":"1a85a2a8d49436c0be6af82fe48355c244a2b168b57de9028eab0ce5a27c71c6"} Oct 07 19:36:26 crc kubenswrapper[4813]: I1007 19:36:26.420761 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a0b6-account-create-zz2gl" event={"ID":"311a453f-d885-474f-a1e1-2c892940fdb0","Type":"ContainerStarted","Data":"2c13a250bd10b686e10bf180e165b1cdb113f71e6c72d591e7323ca1ba106a06"} Oct 07 19:36:26 crc kubenswrapper[4813]: I1007 19:36:26.443234 
4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-a0b6-account-create-zz2gl" podStartSLOduration=1.443216203 podStartE2EDuration="1.443216203s" podCreationTimestamp="2025-10-07 19:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:36:26.441786681 +0000 UTC m=+1112.520042292" watchObservedRunningTime="2025-10-07 19:36:26.443216203 +0000 UTC m=+1112.521471814" Oct 07 19:36:26 crc kubenswrapper[4813]: I1007 19:36:26.557747 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4d0c-account-create-gkbfl"] Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.430613 4813 generic.go:334] "Generic (PLEG): container finished" podID="311a453f-d885-474f-a1e1-2c892940fdb0" containerID="1a85a2a8d49436c0be6af82fe48355c244a2b168b57de9028eab0ce5a27c71c6" exitCode=0 Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.430706 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a0b6-account-create-zz2gl" event={"ID":"311a453f-d885-474f-a1e1-2c892940fdb0","Type":"ContainerDied","Data":"1a85a2a8d49436c0be6af82fe48355c244a2b168b57de9028eab0ce5a27c71c6"} Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.433271 4813 generic.go:334] "Generic (PLEG): container finished" podID="f203d9f1-8903-4490-bb6e-fe1e9d9988e4" containerID="8d82199ccc9b0360971dc8523bcf463552830858fa67f8f03c308c99c72cebbe" exitCode=0 Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.433347 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4d0c-account-create-gkbfl" event={"ID":"f203d9f1-8903-4490-bb6e-fe1e9d9988e4","Type":"ContainerDied","Data":"8d82199ccc9b0360971dc8523bcf463552830858fa67f8f03c308c99c72cebbe"} Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.433394 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4d0c-account-create-gkbfl" event={"ID":"f203d9f1-8903-4490-bb6e-fe1e9d9988e4","Type":"ContainerStarted","Data":"2853778c47e2872999f72e428e2e0cf5c3f0a68ffdb47bde5dd8abe7e403f902"} Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.436700 4813 generic.go:334] "Generic (PLEG): container finished" podID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerID="eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86" exitCode=0 Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.436795 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerDied","Data":"eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86"} Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.438795 4813 generic.go:334] "Generic (PLEG): container finished" podID="c0eed1e1-e367-4fec-a708-c20a5c871719" containerID="e9ff2fa9cd9fd0141162ef0ed37e05fcfcf4074ccf2d15deb75ac532fb5062a7" exitCode=0 Oct 07 19:36:27 crc kubenswrapper[4813]: I1007 19:36:27.438846 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1254-account-create-m5qfr" event={"ID":"c0eed1e1-e367-4fec-a708-c20a5c871719","Type":"ContainerDied","Data":"e9ff2fa9cd9fd0141162ef0ed37e05fcfcf4074ccf2d15deb75ac532fb5062a7"} Oct 07 19:36:28 crc kubenswrapper[4813]: I1007 19:36:28.981730 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-1254-account-create-m5qfr" Oct 07 19:36:28 crc kubenswrapper[4813]: I1007 19:36:28.991935 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4d0c-account-create-gkbfl" Oct 07 19:36:28 crc kubenswrapper[4813]: I1007 19:36:28.996978 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a0b6-account-create-zz2gl" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.065084 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8tql\" (UniqueName: \"kubernetes.io/projected/c0eed1e1-e367-4fec-a708-c20a5c871719-kube-api-access-b8tql\") pod \"c0eed1e1-e367-4fec-a708-c20a5c871719\" (UID: \"c0eed1e1-e367-4fec-a708-c20a5c871719\") " Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.071624 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0eed1e1-e367-4fec-a708-c20a5c871719-kube-api-access-b8tql" (OuterVolumeSpecName: "kube-api-access-b8tql") pod "c0eed1e1-e367-4fec-a708-c20a5c871719" (UID: "c0eed1e1-e367-4fec-a708-c20a5c871719"). InnerVolumeSpecName "kube-api-access-b8tql". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.167491 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlkpf\" (UniqueName: \"kubernetes.io/projected/311a453f-d885-474f-a1e1-2c892940fdb0-kube-api-access-vlkpf\") pod \"311a453f-d885-474f-a1e1-2c892940fdb0\" (UID: \"311a453f-d885-474f-a1e1-2c892940fdb0\") " Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.167630 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5xj8\" (UniqueName: \"kubernetes.io/projected/f203d9f1-8903-4490-bb6e-fe1e9d9988e4-kube-api-access-m5xj8\") pod \"f203d9f1-8903-4490-bb6e-fe1e9d9988e4\" (UID: \"f203d9f1-8903-4490-bb6e-fe1e9d9988e4\") " Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.168238 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8tql\" (UniqueName: \"kubernetes.io/projected/c0eed1e1-e367-4fec-a708-c20a5c871719-kube-api-access-b8tql\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.171781 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f203d9f1-8903-4490-bb6e-fe1e9d9988e4-kube-api-access-m5xj8" (OuterVolumeSpecName: "kube-api-access-m5xj8") pod "f203d9f1-8903-4490-bb6e-fe1e9d9988e4" (UID: "f203d9f1-8903-4490-bb6e-fe1e9d9988e4"). InnerVolumeSpecName "kube-api-access-m5xj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.172977 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/311a453f-d885-474f-a1e1-2c892940fdb0-kube-api-access-vlkpf" (OuterVolumeSpecName: "kube-api-access-vlkpf") pod "311a453f-d885-474f-a1e1-2c892940fdb0" (UID: "311a453f-d885-474f-a1e1-2c892940fdb0"). InnerVolumeSpecName "kube-api-access-vlkpf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.270538 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlkpf\" (UniqueName: \"kubernetes.io/projected/311a453f-d885-474f-a1e1-2c892940fdb0-kube-api-access-vlkpf\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.270569 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5xj8\" (UniqueName: \"kubernetes.io/projected/f203d9f1-8903-4490-bb6e-fe1e9d9988e4-kube-api-access-m5xj8\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.455378 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1254-account-create-m5qfr" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.455440 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1254-account-create-m5qfr" event={"ID":"c0eed1e1-e367-4fec-a708-c20a5c871719","Type":"ContainerDied","Data":"d9507f615c543f1125dcdb8661621168cc22f0e951844924ba29ddccb60e67b9"} Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.455492 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9507f615c543f1125dcdb8661621168cc22f0e951844924ba29ddccb60e67b9" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.457572 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a0b6-account-create-zz2gl" event={"ID":"311a453f-d885-474f-a1e1-2c892940fdb0","Type":"ContainerDied","Data":"2c13a250bd10b686e10bf180e165b1cdb113f71e6c72d591e7323ca1ba106a06"} Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.457604 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c13a250bd10b686e10bf180e165b1cdb113f71e6c72d591e7323ca1ba106a06" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.457641 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a0b6-account-create-zz2gl" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.459852 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4d0c-account-create-gkbfl" event={"ID":"f203d9f1-8903-4490-bb6e-fe1e9d9988e4","Type":"ContainerDied","Data":"2853778c47e2872999f72e428e2e0cf5c3f0a68ffdb47bde5dd8abe7e403f902"} Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.459894 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2853778c47e2872999f72e428e2e0cf5c3f0a68ffdb47bde5dd8abe7e403f902" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.459958 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4d0c-account-create-gkbfl" Oct 07 19:36:29 crc kubenswrapper[4813]: I1007 19:36:29.987422 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.125088 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-687ddb5b-lwwn2" podUID="a0b0d403-9a0c-407b-a3d4-a0db3e612092" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.792155 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vr57t"] Oct 07 19:36:30 crc kubenswrapper[4813]: E1007 19:36:30.792503 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="311a453f-d885-474f-a1e1-2c892940fdb0" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.792786 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="311a453f-d885-474f-a1e1-2c892940fdb0" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: E1007 19:36:30.792806 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0eed1e1-e367-4fec-a708-c20a5c871719" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.792812 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0eed1e1-e367-4fec-a708-c20a5c871719" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: E1007 19:36:30.792823 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f203d9f1-8903-4490-bb6e-fe1e9d9988e4" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.792829 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f203d9f1-8903-4490-bb6e-fe1e9d9988e4" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.793000 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f203d9f1-8903-4490-bb6e-fe1e9d9988e4" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.793029 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="311a453f-d885-474f-a1e1-2c892940fdb0" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.793044 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0eed1e1-e367-4fec-a708-c20a5c871719" containerName="mariadb-account-create" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.818699 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.827671 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-9wnbj" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.827916 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.828071 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 07 19:36:30 crc kubenswrapper[4813]: E1007 19:36:30.832094 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8705c88f_a04a_4861_9e64_05bf5e90237f.slice\": RecentStats: unable to find data in memory cache]" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.865925 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vr57t"] Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.964316 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s6s2\" (UniqueName: \"kubernetes.io/projected/0057db6e-821d-4404-bc89-2a03563c71d2-kube-api-access-9s6s2\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.964418 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-scripts\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.964492 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:30 crc kubenswrapper[4813]: I1007 19:36:30.964527 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-config-data\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.065567 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-scripts\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.066584 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " 
pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.066706 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-config-data\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.066891 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s6s2\" (UniqueName: \"kubernetes.io/projected/0057db6e-821d-4404-bc89-2a03563c71d2-kube-api-access-9s6s2\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.076975 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-scripts\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.093836 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.099863 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-config-data\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.102863 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s6s2\" (UniqueName: \"kubernetes.io/projected/0057db6e-821d-4404-bc89-2a03563c71d2-kube-api-access-9s6s2\") pod \"nova-cell0-conductor-db-sync-vr57t\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.164655 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:31 crc kubenswrapper[4813]: W1007 19:36:31.699731 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0057db6e_821d_4404_bc89_2a03563c71d2.slice/crio-1c834867b93a25048a361d5bb00fc886230ff6e20b7decfe26ce6536ce4c5618 WatchSource:0}: Error finding container 1c834867b93a25048a361d5bb00fc886230ff6e20b7decfe26ce6536ce4c5618: Status 404 returned error can't find the container with id 1c834867b93a25048a361d5bb00fc886230ff6e20b7decfe26ce6536ce4c5618 Oct 07 19:36:31 crc kubenswrapper[4813]: I1007 19:36:31.703225 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vr57t"] Oct 07 19:36:32 crc kubenswrapper[4813]: I1007 19:36:32.489086 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vr57t" event={"ID":"0057db6e-821d-4404-bc89-2a03563c71d2","Type":"ContainerStarted","Data":"1c834867b93a25048a361d5bb00fc886230ff6e20b7decfe26ce6536ce4c5618"} Oct 07 19:36:32 crc kubenswrapper[4813]: I1007 19:36:32.638188 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:36:32 crc kubenswrapper[4813]: I1007 19:36:32.638454 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerName="glance-log" containerID="cri-o://05983812707cee45fbf3a6f60bea7bf2ff2c169c6521d9e461df7fd46720a556" gracePeriod=30 Oct 07 19:36:32 crc kubenswrapper[4813]: I1007 19:36:32.638548 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerName="glance-httpd" containerID="cri-o://0d9f0d404ce1ca7da8ab09baec87d7db50f8840239f534262bd171063d95ed82" gracePeriod=30 Oct 07 19:36:33 crc kubenswrapper[4813]: I1007 19:36:33.529997 4813 generic.go:334] "Generic (PLEG): container finished" podID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerID="05983812707cee45fbf3a6f60bea7bf2ff2c169c6521d9e461df7fd46720a556" exitCode=143 Oct 07 19:36:33 crc kubenswrapper[4813]: I1007 19:36:33.530046 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"825f92d1-f764-41bf-89ec-a0760b63ebff","Type":"ContainerDied","Data":"05983812707cee45fbf3a6f60bea7bf2ff2c169c6521d9e461df7fd46720a556"} Oct 07 19:36:33 crc kubenswrapper[4813]: I1007 19:36:33.531850 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:36:33 crc kubenswrapper[4813]: I1007 19:36:33.532093 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-log" containerID="cri-o://297bbe6065a680be07f4512a4e6250711142778fb47889e205be640811d52334" gracePeriod=30 Oct 07 19:36:33 crc kubenswrapper[4813]: I1007 19:36:33.532229 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-httpd" containerID="cri-o://5e911e5b0d8acb81490e42113315e655e725676e24019c286d83bbf88af49d75" gracePeriod=30 Oct 07 19:36:34 crc kubenswrapper[4813]: I1007 19:36:34.539907 4813 generic.go:334] "Generic (PLEG): 
container finished" podID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerID="297bbe6065a680be07f4512a4e6250711142778fb47889e205be640811d52334" exitCode=143 Oct 07 19:36:34 crc kubenswrapper[4813]: I1007 19:36:34.539949 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8e5ad266-d270-4685-81e4-85c81a1853fb","Type":"ContainerDied","Data":"297bbe6065a680be07f4512a4e6250711142778fb47889e205be640811d52334"} Oct 07 19:36:36 crc kubenswrapper[4813]: I1007 19:36:36.560408 4813 generic.go:334] "Generic (PLEG): container finished" podID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerID="0d9f0d404ce1ca7da8ab09baec87d7db50f8840239f534262bd171063d95ed82" exitCode=0 Oct 07 19:36:36 crc kubenswrapper[4813]: I1007 19:36:36.560464 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"825f92d1-f764-41bf-89ec-a0760b63ebff","Type":"ContainerDied","Data":"0d9f0d404ce1ca7da8ab09baec87d7db50f8840239f534262bd171063d95ed82"} Oct 07 19:36:37 crc kubenswrapper[4813]: I1007 19:36:37.573455 4813 generic.go:334] "Generic (PLEG): container finished" podID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerID="5e911e5b0d8acb81490e42113315e655e725676e24019c286d83bbf88af49d75" exitCode=0 Oct 07 19:36:37 crc kubenswrapper[4813]: I1007 19:36:37.573724 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8e5ad266-d270-4685-81e4-85c81a1853fb","Type":"ContainerDied","Data":"5e911e5b0d8acb81490e42113315e655e725676e24019c286d83bbf88af49d75"} Oct 07 19:36:39 crc kubenswrapper[4813]: I1007 19:36:39.915662 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.156:9292/healthcheck\": dial tcp 10.217.0.156:9292: connect: connection refused" Oct 07 19:36:39 crc kubenswrapper[4813]: I1007 19:36:39.916149 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.156:9292/healthcheck\": dial tcp 10.217.0.156:9292: connect: connection refused" Oct 07 19:36:39 crc kubenswrapper[4813]: I1007 19:36:39.987315 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 07 19:36:40 crc kubenswrapper[4813]: I1007 19:36:40.123673 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-687ddb5b-lwwn2" podUID="a0b0d403-9a0c-407b-a3d4-a0db3e612092" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Oct 07 19:36:41 crc kubenswrapper[4813]: E1007 19:36:41.167261 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8705c88f_a04a_4861_9e64_05bf5e90237f.slice\": RecentStats: unable to find data in memory cache]" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.103477 4813 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.180249 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-internal-tls-certs\") pod \"8e5ad266-d270-4685-81e4-85c81a1853fb\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.180287 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-combined-ca-bundle\") pod \"8e5ad266-d270-4685-81e4-85c81a1853fb\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.180360 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-config-data\") pod \"8e5ad266-d270-4685-81e4-85c81a1853fb\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.180381 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-logs\") pod \"8e5ad266-d270-4685-81e4-85c81a1853fb\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.180413 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-httpd-run\") pod \"8e5ad266-d270-4685-81e4-85c81a1853fb\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.180440 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkcvd\" (UniqueName: \"kubernetes.io/projected/8e5ad266-d270-4685-81e4-85c81a1853fb-kube-api-access-xkcvd\") pod \"8e5ad266-d270-4685-81e4-85c81a1853fb\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.180890 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8e5ad266-d270-4685-81e4-85c81a1853fb" (UID: "8e5ad266-d270-4685-81e4-85c81a1853fb"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.180988 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"8e5ad266-d270-4685-81e4-85c81a1853fb\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.181014 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-scripts\") pod \"8e5ad266-d270-4685-81e4-85c81a1853fb\" (UID: \"8e5ad266-d270-4685-81e4-85c81a1853fb\") " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.181013 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-logs" (OuterVolumeSpecName: "logs") pod "8e5ad266-d270-4685-81e4-85c81a1853fb" (UID: "8e5ad266-d270-4685-81e4-85c81a1853fb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.181446 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.181460 4813 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5ad266-d270-4685-81e4-85c81a1853fb-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.187027 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "8e5ad266-d270-4685-81e4-85c81a1853fb" (UID: "8e5ad266-d270-4685-81e4-85c81a1853fb"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.187127 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-scripts" (OuterVolumeSpecName: "scripts") pod "8e5ad266-d270-4685-81e4-85c81a1853fb" (UID: "8e5ad266-d270-4685-81e4-85c81a1853fb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.193412 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e5ad266-d270-4685-81e4-85c81a1853fb-kube-api-access-xkcvd" (OuterVolumeSpecName: "kube-api-access-xkcvd") pod "8e5ad266-d270-4685-81e4-85c81a1853fb" (UID: "8e5ad266-d270-4685-81e4-85c81a1853fb"). InnerVolumeSpecName "kube-api-access-xkcvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.230535 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-config-data" (OuterVolumeSpecName: "config-data") pod "8e5ad266-d270-4685-81e4-85c81a1853fb" (UID: "8e5ad266-d270-4685-81e4-85c81a1853fb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.241381 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e5ad266-d270-4685-81e4-85c81a1853fb" (UID: "8e5ad266-d270-4685-81e4-85c81a1853fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.265675 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8e5ad266-d270-4685-81e4-85c81a1853fb" (UID: "8e5ad266-d270-4685-81e4-85c81a1853fb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.283616 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.283645 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.283656 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.283666 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.283678 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5ad266-d270-4685-81e4-85c81a1853fb-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.283690 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkcvd\" (UniqueName: \"kubernetes.io/projected/8e5ad266-d270-4685-81e4-85c81a1853fb-kube-api-access-xkcvd\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.302302 4813 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.384872 4813 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.625666 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vr57t" event={"ID":"0057db6e-821d-4404-bc89-2a03563c71d2","Type":"ContainerStarted","Data":"c46c699ded93e6218c7c209945fad38f61b4069a65e8ad59eace5ab97176021f"} Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.631403 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"8e5ad266-d270-4685-81e4-85c81a1853fb","Type":"ContainerDied","Data":"6ca08a0f4cdd549e098639288f800d99a8fd9f1158ce09c799e57104b35fd816"} Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.631551 4813 scope.go:117] "RemoveContainer" containerID="5e911e5b0d8acb81490e42113315e655e725676e24019c286d83bbf88af49d75" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.631737 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.650461 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-vr57t" podStartSLOduration=2.612590361 podStartE2EDuration="12.650434935s" podCreationTimestamp="2025-10-07 19:36:30 +0000 UTC" firstStartedPulling="2025-10-07 19:36:31.701593761 +0000 UTC m=+1117.779849382" lastFinishedPulling="2025-10-07 19:36:41.739438345 +0000 UTC m=+1127.817693956" observedRunningTime="2025-10-07 19:36:42.643597567 +0000 UTC m=+1128.721853178" watchObservedRunningTime="2025-10-07 19:36:42.650434935 +0000 UTC m=+1128.728690556" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.674868 4813 scope.go:117] "RemoveContainer" containerID="297bbe6065a680be07f4512a4e6250711142778fb47889e205be640811d52334" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.704777 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.750366 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.778777 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:36:42 crc kubenswrapper[4813]: E1007 19:36:42.779464 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-httpd" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.779482 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-httpd" Oct 07 19:36:42 crc kubenswrapper[4813]: E1007 19:36:42.779494 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-log" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.779502 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-log" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.779851 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-log" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.779898 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" containerName="glance-httpd" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.781394 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.784142 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.784985 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.791838 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.895213 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.895305 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84cd7775-d255-44d6-a361-0fd247bb406d-logs\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.895365 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swq9x\" (UniqueName: \"kubernetes.io/projected/84cd7775-d255-44d6-a361-0fd247bb406d-kube-api-access-swq9x\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.895390 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.895444 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.895488 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/84cd7775-d255-44d6-a361-0fd247bb406d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.895513 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.895569 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.947402 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.997383 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.997456 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/84cd7775-d255-44d6-a361-0fd247bb406d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.997487 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.997546 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.997588 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.997630 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84cd7775-d255-44d6-a361-0fd247bb406d-logs\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.997654 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swq9x\" (UniqueName: \"kubernetes.io/projected/84cd7775-d255-44d6-a361-0fd247bb406d-kube-api-access-swq9x\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.997671 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc 
kubenswrapper[4813]: I1007 19:36:42.999384 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84cd7775-d255-44d6-a361-0fd247bb406d-logs\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.999461 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/84cd7775-d255-44d6-a361-0fd247bb406d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:42 crc kubenswrapper[4813]: I1007 19:36:42.999839 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.002920 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.013019 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.018950 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.023317 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swq9x\" (UniqueName: \"kubernetes.io/projected/84cd7775-d255-44d6-a361-0fd247bb406d-kube-api-access-swq9x\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.039900 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84cd7775-d255-44d6-a361-0fd247bb406d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.065900 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"84cd7775-d255-44d6-a361-0fd247bb406d\") " pod="openstack/glance-default-internal-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.099409 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-public-tls-certs\") pod \"825f92d1-f764-41bf-89ec-a0760b63ebff\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.099673 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n225g\" (UniqueName: \"kubernetes.io/projected/825f92d1-f764-41bf-89ec-a0760b63ebff-kube-api-access-n225g\") pod \"825f92d1-f764-41bf-89ec-a0760b63ebff\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.099752 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-config-data\") pod \"825f92d1-f764-41bf-89ec-a0760b63ebff\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.099810 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-httpd-run\") pod \"825f92d1-f764-41bf-89ec-a0760b63ebff\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.099884 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-scripts\") pod \"825f92d1-f764-41bf-89ec-a0760b63ebff\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.099942 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-logs\") pod \"825f92d1-f764-41bf-89ec-a0760b63ebff\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.099964 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-combined-ca-bundle\") pod \"825f92d1-f764-41bf-89ec-a0760b63ebff\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.099984 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"825f92d1-f764-41bf-89ec-a0760b63ebff\" (UID: \"825f92d1-f764-41bf-89ec-a0760b63ebff\") " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.100639 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "825f92d1-f764-41bf-89ec-a0760b63ebff" (UID: "825f92d1-f764-41bf-89ec-a0760b63ebff"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.101750 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-logs" (OuterVolumeSpecName: "logs") pod "825f92d1-f764-41bf-89ec-a0760b63ebff" (UID: "825f92d1-f764-41bf-89ec-a0760b63ebff"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.104882 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "825f92d1-f764-41bf-89ec-a0760b63ebff" (UID: "825f92d1-f764-41bf-89ec-a0760b63ebff"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.106903 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.107550 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/825f92d1-f764-41bf-89ec-a0760b63ebff-kube-api-access-n225g" (OuterVolumeSpecName: "kube-api-access-n225g") pod "825f92d1-f764-41bf-89ec-a0760b63ebff" (UID: "825f92d1-f764-41bf-89ec-a0760b63ebff"). InnerVolumeSpecName "kube-api-access-n225g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.124824 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-scripts" (OuterVolumeSpecName: "scripts") pod "825f92d1-f764-41bf-89ec-a0760b63ebff" (UID: "825f92d1-f764-41bf-89ec-a0760b63ebff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.156344 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "825f92d1-f764-41bf-89ec-a0760b63ebff" (UID: "825f92d1-f764-41bf-89ec-a0760b63ebff"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.159472 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-config-data" (OuterVolumeSpecName: "config-data") pod "825f92d1-f764-41bf-89ec-a0760b63ebff" (UID: "825f92d1-f764-41bf-89ec-a0760b63ebff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.164525 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "825f92d1-f764-41bf-89ec-a0760b63ebff" (UID: "825f92d1-f764-41bf-89ec-a0760b63ebff"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.203923 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.203949 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.203985 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.203995 4813 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.204004 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n225g\" (UniqueName: \"kubernetes.io/projected/825f92d1-f764-41bf-89ec-a0760b63ebff-kube-api-access-n225g\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.204013 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.204022 4813 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/825f92d1-f764-41bf-89ec-a0760b63ebff-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.204030 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/825f92d1-f764-41bf-89ec-a0760b63ebff-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.243877 4813 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.307797 4813 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.645748 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.645748 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"825f92d1-f764-41bf-89ec-a0760b63ebff","Type":"ContainerDied","Data":"6949446b9f126f3bbc5ac54ae5b7c1363c14f074d616943d5e5783c9d3e32a85"} Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.646226 4813 scope.go:117] "RemoveContainer" containerID="0d9f0d404ce1ca7da8ab09baec87d7db50f8840239f534262bd171063d95ed82" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.675520 4813 scope.go:117] "RemoveContainer" containerID="05983812707cee45fbf3a6f60bea7bf2ff2c169c6521d9e461df7fd46720a556" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.679311 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.696587 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.703593 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.724504 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:36:43 crc kubenswrapper[4813]: E1007 19:36:43.724935 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerName="glance-httpd" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.724956 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerName="glance-httpd" Oct 07 19:36:43 crc kubenswrapper[4813]: E1007 19:36:43.724997 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerName="glance-log" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.725003 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerName="glance-log" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.725192 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerName="glance-log" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.725218 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" containerName="glance-httpd" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.726126 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.734344 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.738781 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.738962 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.815191 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.815289 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lj8x\" (UniqueName: \"kubernetes.io/projected/e7153372-cc47-4ff1-8481-b04a58c5c587-kube-api-access-8lj8x\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.815317 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7153372-cc47-4ff1-8481-b04a58c5c587-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.815362 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.815408 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-config-data\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.815437 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-scripts\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.815462 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.815484 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7153372-cc47-4ff1-8481-b04a58c5c587-logs\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.916788 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-config-data\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.916846 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-scripts\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.916869 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.916888 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7153372-cc47-4ff1-8481-b04a58c5c587-logs\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.916942 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.917000 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lj8x\" (UniqueName: \"kubernetes.io/projected/e7153372-cc47-4ff1-8481-b04a58c5c587-kube-api-access-8lj8x\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.917023 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7153372-cc47-4ff1-8481-b04a58c5c587-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.917051 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.918043 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/e7153372-cc47-4ff1-8481-b04a58c5c587-logs\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.918253 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.918540 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e7153372-cc47-4ff1-8481-b04a58c5c587-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.921942 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-config-data\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.922525 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.923719 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-scripts\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.937667 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7153372-cc47-4ff1-8481-b04a58c5c587-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.938590 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lj8x\" (UniqueName: \"kubernetes.io/projected/e7153372-cc47-4ff1-8481-b04a58c5c587-kube-api-access-8lj8x\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:43 crc kubenswrapper[4813]: I1007 19:36:43.957651 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"e7153372-cc47-4ff1-8481-b04a58c5c587\") " pod="openstack/glance-default-external-api-0" Oct 07 19:36:44 crc kubenswrapper[4813]: I1007 19:36:44.036583 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 07 
19:36:44 crc kubenswrapper[4813]: I1007 19:36:44.081196 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 07 19:36:44 crc kubenswrapper[4813]: I1007 19:36:44.623461 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="825f92d1-f764-41bf-89ec-a0760b63ebff" path="/var/lib/kubelet/pods/825f92d1-f764-41bf-89ec-a0760b63ebff/volumes" Oct 07 19:36:44 crc kubenswrapper[4813]: I1007 19:36:44.624818 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e5ad266-d270-4685-81e4-85c81a1853fb" path="/var/lib/kubelet/pods/8e5ad266-d270-4685-81e4-85c81a1853fb/volumes" Oct 07 19:36:44 crc kubenswrapper[4813]: I1007 19:36:44.690851 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"84cd7775-d255-44d6-a361-0fd247bb406d","Type":"ContainerStarted","Data":"2d6a2b9aa0f6fa111f7fd379e5177af394dabe0f4d682ec820d3d94b59b4d9ba"} Oct 07 19:36:44 crc kubenswrapper[4813]: I1007 19:36:44.690919 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"84cd7775-d255-44d6-a361-0fd247bb406d","Type":"ContainerStarted","Data":"fe46c400baf7413416d220c403828a8c8911b1ee7d7195e4cea62197987d4694"} Oct 07 19:36:44 crc kubenswrapper[4813]: I1007 19:36:44.817919 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 07 19:36:45 crc kubenswrapper[4813]: I1007 19:36:45.710492 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"84cd7775-d255-44d6-a361-0fd247bb406d","Type":"ContainerStarted","Data":"d311ec6f3ddbe56917dee8b2709337c736f12eccb5feb14d0927065b287e17ac"} Oct 07 19:36:45 crc kubenswrapper[4813]: I1007 19:36:45.718048 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7153372-cc47-4ff1-8481-b04a58c5c587","Type":"ContainerStarted","Data":"792e4f13270a10c853b7b13ede89573acc01cca0fd231faa5117c9237e9545d6"} Oct 07 19:36:45 crc kubenswrapper[4813]: I1007 19:36:45.718083 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7153372-cc47-4ff1-8481-b04a58c5c587","Type":"ContainerStarted","Data":"66554620a9f72dc7dc0ef7658befeaadbbe5d28ffcbfdaf7a11c2b64421e409e"} Oct 07 19:36:45 crc kubenswrapper[4813]: I1007 19:36:45.736168 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.736156269 podStartE2EDuration="3.736156269s" podCreationTimestamp="2025-10-07 19:36:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:36:45.733727389 +0000 UTC m=+1131.811983030" watchObservedRunningTime="2025-10-07 19:36:45.736156269 +0000 UTC m=+1131.814411880" Oct 07 19:36:46 crc kubenswrapper[4813]: I1007 19:36:46.733216 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e7153372-cc47-4ff1-8481-b04a58c5c587","Type":"ContainerStarted","Data":"3ef72ae706843bf3f39a0111ebcd7f6936d4c34cc3379f1cd4649d8a14932f5c"} Oct 07 19:36:46 crc kubenswrapper[4813]: I1007 19:36:46.760605 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.760580917 
podStartE2EDuration="3.760580917s" podCreationTimestamp="2025-10-07 19:36:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:36:46.75622566 +0000 UTC m=+1132.834481291" watchObservedRunningTime="2025-10-07 19:36:46.760580917 +0000 UTC m=+1132.838836538" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.721065 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.777381 4813 generic.go:334] "Generic (PLEG): container finished" podID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerID="57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597" exitCode=137 Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.777432 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerDied","Data":"57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597"} Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.777450 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.777465 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"def784b9-3422-478d-8966-f6fe5aaa3b63","Type":"ContainerDied","Data":"d58b2604f5ab29704bdd92eb02723233b134369a738e96165957f066a799c6db"} Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.777487 4813 scope.go:117] "RemoveContainer" containerID="57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.799494 4813 scope.go:117] "RemoveContainer" containerID="e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.816856 4813 scope.go:117] "RemoveContainer" containerID="0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.838803 4813 scope.go:117] "RemoveContainer" containerID="eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.859742 4813 scope.go:117] "RemoveContainer" containerID="57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597" Oct 07 19:36:50 crc kubenswrapper[4813]: E1007 19:36:50.860244 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597\": container with ID starting with 57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597 not found: ID does not exist" containerID="57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.860277 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597"} err="failed to get container status \"57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597\": rpc error: code = NotFound desc = could not find container \"57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597\": container with ID starting with 57bea2c707de39be1b0df518b2ad86d403bbcec253a87632049aeb73f9526597 not found: ID does not exist" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 
19:36:50.860298 4813 scope.go:117] "RemoveContainer" containerID="e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37" Oct 07 19:36:50 crc kubenswrapper[4813]: E1007 19:36:50.860692 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37\": container with ID starting with e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37 not found: ID does not exist" containerID="e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.860718 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37"} err="failed to get container status \"e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37\": rpc error: code = NotFound desc = could not find container \"e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37\": container with ID starting with e87d4a7a02ea17cbe09440b09a3f0bdf198c78cc5f60685117a7aef6ff246e37 not found: ID does not exist" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.860733 4813 scope.go:117] "RemoveContainer" containerID="0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662" Oct 07 19:36:50 crc kubenswrapper[4813]: E1007 19:36:50.860961 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662\": container with ID starting with 0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662 not found: ID does not exist" containerID="0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.860984 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662"} err="failed to get container status \"0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662\": rpc error: code = NotFound desc = could not find container \"0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662\": container with ID starting with 0777613e5bff6ea1011a1b05722f8617dd723ce22c65eb1e7ab3686ef75be662 not found: ID does not exist" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.860998 4813 scope.go:117] "RemoveContainer" containerID="eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.861091 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-sg-core-conf-yaml\") pod \"def784b9-3422-478d-8966-f6fe5aaa3b63\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " Oct 07 19:36:50 crc kubenswrapper[4813]: E1007 19:36:50.861194 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86\": container with ID starting with eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86 not found: ID does not exist" containerID="eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.861225 4813 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86"} err="failed to get container status \"eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86\": rpc error: code = NotFound desc = could not find container \"eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86\": container with ID starting with eb251f5608b4ce507644b203e67afd470610eb55e87c97ca04498522cb45da86 not found: ID does not exist" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.861254 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-scripts\") pod \"def784b9-3422-478d-8966-f6fe5aaa3b63\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.861283 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-run-httpd\") pod \"def784b9-3422-478d-8966-f6fe5aaa3b63\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.861339 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-log-httpd\") pod \"def784b9-3422-478d-8966-f6fe5aaa3b63\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.861373 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-combined-ca-bundle\") pod \"def784b9-3422-478d-8966-f6fe5aaa3b63\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.861399 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjg2l\" (UniqueName: \"kubernetes.io/projected/def784b9-3422-478d-8966-f6fe5aaa3b63-kube-api-access-cjg2l\") pod \"def784b9-3422-478d-8966-f6fe5aaa3b63\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.861438 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-config-data\") pod \"def784b9-3422-478d-8966-f6fe5aaa3b63\" (UID: \"def784b9-3422-478d-8966-f6fe5aaa3b63\") " Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.863108 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "def784b9-3422-478d-8966-f6fe5aaa3b63" (UID: "def784b9-3422-478d-8966-f6fe5aaa3b63"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.863751 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "def784b9-3422-478d-8966-f6fe5aaa3b63" (UID: "def784b9-3422-478d-8966-f6fe5aaa3b63"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.867235 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/def784b9-3422-478d-8966-f6fe5aaa3b63-kube-api-access-cjg2l" (OuterVolumeSpecName: "kube-api-access-cjg2l") pod "def784b9-3422-478d-8966-f6fe5aaa3b63" (UID: "def784b9-3422-478d-8966-f6fe5aaa3b63"). InnerVolumeSpecName "kube-api-access-cjg2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.868234 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-scripts" (OuterVolumeSpecName: "scripts") pod "def784b9-3422-478d-8966-f6fe5aaa3b63" (UID: "def784b9-3422-478d-8966-f6fe5aaa3b63"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.904269 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "def784b9-3422-478d-8966-f6fe5aaa3b63" (UID: "def784b9-3422-478d-8966-f6fe5aaa3b63"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.949866 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "def784b9-3422-478d-8966-f6fe5aaa3b63" (UID: "def784b9-3422-478d-8966-f6fe5aaa3b63"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.963941 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.963993 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.964005 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/def784b9-3422-478d-8966-f6fe5aaa3b63-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.964013 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.964046 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjg2l\" (UniqueName: \"kubernetes.io/projected/def784b9-3422-478d-8966-f6fe5aaa3b63-kube-api-access-cjg2l\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.964055 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:50 crc kubenswrapper[4813]: I1007 19:36:50.976405 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-config-data" (OuterVolumeSpecName: "config-data") pod "def784b9-3422-478d-8966-f6fe5aaa3b63" (UID: "def784b9-3422-478d-8966-f6fe5aaa3b63"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.066453 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/def784b9-3422-478d-8966-f6fe5aaa3b63-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.113803 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.123053 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.144499 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:51 crc kubenswrapper[4813]: E1007 19:36:51.144898 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="sg-core" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.144909 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="sg-core" Oct 07 19:36:51 crc kubenswrapper[4813]: E1007 19:36:51.144939 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="proxy-httpd" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.144945 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="proxy-httpd" Oct 07 19:36:51 crc kubenswrapper[4813]: E1007 19:36:51.144959 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="ceilometer-central-agent" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.144966 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="ceilometer-central-agent" Oct 07 19:36:51 crc kubenswrapper[4813]: E1007 19:36:51.144982 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="ceilometer-notification-agent" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.144987 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="ceilometer-notification-agent" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.145149 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="proxy-httpd" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.145159 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="sg-core" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.145170 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="ceilometer-notification-agent" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.145176 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" containerName="ceilometer-central-agent" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.146677 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.150533 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.150594 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.174262 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.269710 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-log-httpd\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.269792 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-scripts\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.269813 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-run-httpd\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.269864 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ttxn\" (UniqueName: \"kubernetes.io/projected/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-kube-api-access-9ttxn\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.269884 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-config-data\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.269912 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.269944 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.371455 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 
19:36:51.371836 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.372429 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-log-httpd\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.372501 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-scripts\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.372520 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-run-httpd\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.372582 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ttxn\" (UniqueName: \"kubernetes.io/projected/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-kube-api-access-9ttxn\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.372606 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-config-data\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.372712 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-log-httpd\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.372924 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-run-httpd\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.378012 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.378187 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-scripts\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.378796 4813 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.381203 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-config-data\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.398407 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ttxn\" (UniqueName: \"kubernetes.io/projected/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-kube-api-access-9ttxn\") pod \"ceilometer-0\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: E1007 19:36:51.448813 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8705c88f_a04a_4861_9e64_05bf5e90237f.slice\": RecentStats: unable to find data in memory cache]" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.503173 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:36:51 crc kubenswrapper[4813]: I1007 19:36:51.976353 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:36:52 crc kubenswrapper[4813]: I1007 19:36:52.079875 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:36:52 crc kubenswrapper[4813]: I1007 19:36:52.079947 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:36:52 crc kubenswrapper[4813]: I1007 19:36:52.581940 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:36:52 crc kubenswrapper[4813]: I1007 19:36:52.587589 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:36:52 crc kubenswrapper[4813]: I1007 19:36:52.615542 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="def784b9-3422-478d-8966-f6fe5aaa3b63" path="/var/lib/kubelet/pods/def784b9-3422-478d-8966-f6fe5aaa3b63/volumes" Oct 07 19:36:52 crc kubenswrapper[4813]: I1007 19:36:52.795903 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerStarted","Data":"d3e0385f15ad671b8278ae1bb891a260fb8b97edd0d300e19275f7718e94e06e"} Oct 07 19:36:53 crc kubenswrapper[4813]: I1007 19:36:53.107829 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:53 crc kubenswrapper[4813]: I1007 19:36:53.108141 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:53 crc kubenswrapper[4813]: I1007 19:36:53.186759 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:53 crc kubenswrapper[4813]: I1007 19:36:53.196733 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:53 crc kubenswrapper[4813]: I1007 19:36:53.803215 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:53 crc kubenswrapper[4813]: I1007 19:36:53.803491 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.082692 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.083092 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.115918 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.146051 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.812733 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerStarted","Data":"b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc"} Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.813071 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerStarted","Data":"4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2"} Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.813751 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.813954 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.883883 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-687ddb5b-lwwn2" Oct 07 19:36:54 crc kubenswrapper[4813]: I1007 19:36:54.894743 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:36:55 crc kubenswrapper[4813]: I1007 19:36:55.016844 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-688984b46d-g79nd"] Oct 07 19:36:55 crc kubenswrapper[4813]: I1007 19:36:55.829990 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon-log" containerID="cri-o://a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a" gracePeriod=30 Oct 07 19:36:55 crc kubenswrapper[4813]: I1007 19:36:55.831569 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-688984b46d-g79nd" 
podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" containerID="cri-o://f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86" gracePeriod=30 Oct 07 19:36:56 crc kubenswrapper[4813]: I1007 19:36:56.838925 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerStarted","Data":"4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a"} Oct 07 19:36:56 crc kubenswrapper[4813]: I1007 19:36:56.840360 4813 generic.go:334] "Generic (PLEG): container finished" podID="0057db6e-821d-4404-bc89-2a03563c71d2" containerID="c46c699ded93e6218c7c209945fad38f61b4069a65e8ad59eace5ab97176021f" exitCode=0 Oct 07 19:36:56 crc kubenswrapper[4813]: I1007 19:36:56.840388 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vr57t" event={"ID":"0057db6e-821d-4404-bc89-2a03563c71d2","Type":"ContainerDied","Data":"c46c699ded93e6218c7c209945fad38f61b4069a65e8ad59eace5ab97176021f"} Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.320093 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.320189 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.367995 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.630664 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.630758 4813 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.678920 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.861503 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerStarted","Data":"bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7"} Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.875932 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 19:36:57 crc kubenswrapper[4813]: I1007 19:36:57.947848 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.465665729 podStartE2EDuration="6.947832222s" podCreationTimestamp="2025-10-07 19:36:51 +0000 UTC" firstStartedPulling="2025-10-07 19:36:51.981950448 +0000 UTC m=+1138.060206059" lastFinishedPulling="2025-10-07 19:36:57.464116941 +0000 UTC m=+1143.542372552" observedRunningTime="2025-10-07 19:36:57.92714859 +0000 UTC m=+1144.005404201" watchObservedRunningTime="2025-10-07 19:36:57.947832222 +0000 UTC m=+1144.026087823" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.360085 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.453578 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-combined-ca-bundle\") pod \"0057db6e-821d-4404-bc89-2a03563c71d2\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.453918 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-scripts\") pod \"0057db6e-821d-4404-bc89-2a03563c71d2\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.454137 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-config-data\") pod \"0057db6e-821d-4404-bc89-2a03563c71d2\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.454300 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9s6s2\" (UniqueName: \"kubernetes.io/projected/0057db6e-821d-4404-bc89-2a03563c71d2-kube-api-access-9s6s2\") pod \"0057db6e-821d-4404-bc89-2a03563c71d2\" (UID: \"0057db6e-821d-4404-bc89-2a03563c71d2\") " Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.458300 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0057db6e-821d-4404-bc89-2a03563c71d2-kube-api-access-9s6s2" (OuterVolumeSpecName: "kube-api-access-9s6s2") pod "0057db6e-821d-4404-bc89-2a03563c71d2" (UID: "0057db6e-821d-4404-bc89-2a03563c71d2"). InnerVolumeSpecName "kube-api-access-9s6s2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.458557 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-scripts" (OuterVolumeSpecName: "scripts") pod "0057db6e-821d-4404-bc89-2a03563c71d2" (UID: "0057db6e-821d-4404-bc89-2a03563c71d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.479095 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0057db6e-821d-4404-bc89-2a03563c71d2" (UID: "0057db6e-821d-4404-bc89-2a03563c71d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.485887 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-config-data" (OuterVolumeSpecName: "config-data") pod "0057db6e-821d-4404-bc89-2a03563c71d2" (UID: "0057db6e-821d-4404-bc89-2a03563c71d2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.556847 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.557106 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.557183 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0057db6e-821d-4404-bc89-2a03563c71d2-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.557282 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9s6s2\" (UniqueName: \"kubernetes.io/projected/0057db6e-821d-4404-bc89-2a03563c71d2-kube-api-access-9s6s2\") on node \"crc\" DevicePath \"\"" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.862566 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vr57t" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.863036 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vr57t" event={"ID":"0057db6e-821d-4404-bc89-2a03563c71d2","Type":"ContainerDied","Data":"1c834867b93a25048a361d5bb00fc886230ff6e20b7decfe26ce6536ce4c5618"} Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.863068 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c834867b93a25048a361d5bb00fc886230ff6e20b7decfe26ce6536ce4c5618" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.972810 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 19:36:58 crc kubenswrapper[4813]: E1007 19:36:58.973154 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0057db6e-821d-4404-bc89-2a03563c71d2" containerName="nova-cell0-conductor-db-sync" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.973166 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0057db6e-821d-4404-bc89-2a03563c71d2" containerName="nova-cell0-conductor-db-sync" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.973390 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0057db6e-821d-4404-bc89-2a03563c71d2" containerName="nova-cell0-conductor-db-sync" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.973918 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.980335 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-9wnbj" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.980751 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 07 19:36:58 crc kubenswrapper[4813]: I1007 19:36:58.987534 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.068497 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7kkr\" (UniqueName: \"kubernetes.io/projected/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-kube-api-access-r7kkr\") pod \"nova-cell0-conductor-0\" (UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.068853 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.068872 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.170830 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.170882 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.170987 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7kkr\" (UniqueName: \"kubernetes.io/projected/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-kube-api-access-r7kkr\") pod \"nova-cell0-conductor-0\" (UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.176534 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.194237 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.211577 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7kkr\" (UniqueName: \"kubernetes.io/projected/c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1-kube-api-access-r7kkr\") pod \"nova-cell0-conductor-0\" (UID: \"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1\") " pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.294508 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.769848 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 07 19:36:59 crc kubenswrapper[4813]: W1007 19:36:59.789272 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7632bfd_361b_4c06_a1cf_2ec99cd2c2a1.slice/crio-6bb11fbf5185fe1ff2c2bfb1cde25581baa65e7c1f71c5141e4025dcfa02e9fe WatchSource:0}: Error finding container 6bb11fbf5185fe1ff2c2bfb1cde25581baa65e7c1f71c5141e4025dcfa02e9fe: Status 404 returned error can't find the container with id 6bb11fbf5185fe1ff2c2bfb1cde25581baa65e7c1f71c5141e4025dcfa02e9fe Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.879245 4813 generic.go:334] "Generic (PLEG): container finished" podID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerID="f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86" exitCode=0 Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.879301 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688984b46d-g79nd" event={"ID":"aed6b0b2-d265-4f3f-a68b-215696e44617","Type":"ContainerDied","Data":"f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86"} Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.879345 4813 scope.go:117] "RemoveContainer" containerID="6ae7441f8930b87f906e801dc55dce71ed5b180dbc9b0bace9037674d73cac68" Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.882066 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1","Type":"ContainerStarted","Data":"6bb11fbf5185fe1ff2c2bfb1cde25581baa65e7c1f71c5141e4025dcfa02e9fe"} Oct 07 19:36:59 crc kubenswrapper[4813]: I1007 19:36:59.987100 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 07 19:37:00 crc kubenswrapper[4813]: I1007 19:37:00.893525 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1","Type":"ContainerStarted","Data":"6844379f64c1e36370494c13b839c972a7a1d9cd28fffbf87fa3ddf683259944"} Oct 07 19:37:00 crc kubenswrapper[4813]: I1007 19:37:00.893639 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 07 19:37:00 crc kubenswrapper[4813]: I1007 19:37:00.912592 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.9125737689999998 podStartE2EDuration="2.912573769s" podCreationTimestamp="2025-10-07 
19:36:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:00.910578551 +0000 UTC m=+1146.988834172" watchObservedRunningTime="2025-10-07 19:37:00.912573769 +0000 UTC m=+1146.990829380" Oct 07 19:37:04 crc kubenswrapper[4813]: I1007 19:37:04.328725 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 07 19:37:04 crc kubenswrapper[4813]: I1007 19:37:04.841413 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-zc5wc"] Oct 07 19:37:04 crc kubenswrapper[4813]: I1007 19:37:04.843134 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:04 crc kubenswrapper[4813]: I1007 19:37:04.848918 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 07 19:37:04 crc kubenswrapper[4813]: I1007 19:37:04.852336 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 07 19:37:04 crc kubenswrapper[4813]: I1007 19:37:04.871008 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-zc5wc"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.000139 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-scripts\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.000187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-config-data\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.000211 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6g5f\" (UniqueName: \"kubernetes.io/projected/f455f53a-b378-4366-bb40-4e155e06a6b4-kube-api-access-c6g5f\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.000356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.023736 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.024867 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.030630 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.035136 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.101617 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-config-data\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.101675 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.101782 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.101834 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-scripts\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.101858 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-config-data\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.101890 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6g5f\" (UniqueName: \"kubernetes.io/projected/f455f53a-b378-4366-bb40-4e155e06a6b4-kube-api-access-c6g5f\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.101926 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgbw9\" (UniqueName: \"kubernetes.io/projected/39885ebe-9384-4710-99d8-d5dedf7f9d0d-kube-api-access-wgbw9\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.111054 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.112410 
4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-scripts\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.128021 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-config-data\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.152463 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.153778 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.156376 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6g5f\" (UniqueName: \"kubernetes.io/projected/f455f53a-b378-4366-bb40-4e155e06a6b4-kube-api-access-c6g5f\") pod \"nova-cell0-cell-mapping-zc5wc\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.165871 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.174477 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.204077 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgbw9\" (UniqueName: \"kubernetes.io/projected/39885ebe-9384-4710-99d8-d5dedf7f9d0d-kube-api-access-wgbw9\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.204166 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-config-data\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.204196 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.213779 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-config-data\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.214420 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " 
pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.275459 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.275597 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgbw9\" (UniqueName: \"kubernetes.io/projected/39885ebe-9384-4710-99d8-d5dedf7f9d0d-kube-api-access-wgbw9\") pod \"nova-scheduler-0\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") " pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.306002 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.306441 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d54pf\" (UniqueName: \"kubernetes.io/projected/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-kube-api-access-d54pf\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.307181 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.373831 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.412414 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.412506 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.412561 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d54pf\" (UniqueName: \"kubernetes.io/projected/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-kube-api-access-d54pf\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.431561 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.472295 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.479644 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d54pf\" (UniqueName: \"kubernetes.io/projected/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-kube-api-access-d54pf\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.595292 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.597117 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.600232 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.631289 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.633219 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.638427 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.658854 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.678893 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.694892 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.727120 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85aade8a-78d5-431f-820e-71a6f7023f47-logs\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.727177 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr86l\" (UniqueName: \"kubernetes.io/projected/386bb6a1-9d50-455e-bb53-9979dafceaed-kube-api-access-rr86l\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.727257 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/386bb6a1-9d50-455e-bb53-9979dafceaed-logs\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.727296 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-config-data\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.731072 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-rjg4f"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.727338 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s2sx\" (UniqueName: \"kubernetes.io/projected/85aade8a-78d5-431f-820e-71a6f7023f47-kube-api-access-6s2sx\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.736908 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-config-data\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.736997 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.737091 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.743694 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.754737 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-rjg4f"] Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.838915 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85aade8a-78d5-431f-820e-71a6f7023f47-logs\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.838970 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7jq7\" (UniqueName: \"kubernetes.io/projected/4b3b876a-cd89-4a2c-8179-74c000809b17-kube-api-access-p7jq7\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.838992 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr86l\" (UniqueName: \"kubernetes.io/projected/386bb6a1-9d50-455e-bb53-9979dafceaed-kube-api-access-rr86l\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839034 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839056 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/386bb6a1-9d50-455e-bb53-9979dafceaed-logs\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839092 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-config-data\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839111 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-config\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839129 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s2sx\" (UniqueName: \"kubernetes.io/projected/85aade8a-78d5-431f-820e-71a6f7023f47-kube-api-access-6s2sx\") pod 
\"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839162 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-config-data\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839182 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839205 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839231 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839261 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839300 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.839758 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85aade8a-78d5-431f-820e-71a6f7023f47-logs\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.840297 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/386bb6a1-9d50-455e-bb53-9979dafceaed-logs\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.849827 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.855122 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-config-data\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.861557 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-config-data\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.865562 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.867054 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr86l\" (UniqueName: \"kubernetes.io/projected/386bb6a1-9d50-455e-bb53-9979dafceaed-kube-api-access-rr86l\") pod \"nova-api-0\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") " pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.873196 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s2sx\" (UniqueName: \"kubernetes.io/projected/85aade8a-78d5-431f-820e-71a6f7023f47-kube-api-access-6s2sx\") pod \"nova-metadata-0\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " pod="openstack/nova-metadata-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.942009 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.943659 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.944547 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.944752 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7jq7\" (UniqueName: \"kubernetes.io/projected/4b3b876a-cd89-4a2c-8179-74c000809b17-kube-api-access-p7jq7\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.944886 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " 
pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.945028 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-config\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.945702 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-config\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.943822 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.946341 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.942633 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.944468 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.947760 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:05 crc kubenswrapper[4813]: I1007 19:37:05.993752 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:37:06 crc kubenswrapper[4813]: I1007 19:37:06.034409 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7jq7\" (UniqueName: \"kubernetes.io/projected/4b3b876a-cd89-4a2c-8179-74c000809b17-kube-api-access-p7jq7\") pod \"dnsmasq-dns-845d6d6f59-rjg4f\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:06 crc kubenswrapper[4813]: I1007 19:37:06.101380 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:06 crc kubenswrapper[4813]: I1007 19:37:06.162208 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-zc5wc"] Oct 07 19:37:06 crc kubenswrapper[4813]: I1007 19:37:06.498599 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:06 crc kubenswrapper[4813]: I1007 19:37:06.552021 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:37:06 crc kubenswrapper[4813]: I1007 19:37:06.843701 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.055013 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"386bb6a1-9d50-455e-bb53-9979dafceaed","Type":"ContainerStarted","Data":"cf5fa6d2acd5982dfee43286d66adb0ffdde3ad7a7ce1f60e5ad8c369ee2bc7b"} Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.065905 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"39885ebe-9384-4710-99d8-d5dedf7f9d0d","Type":"ContainerStarted","Data":"8a222b5c8e836e7097d52cc783439ec57a0d9ec3a98ddbe0e4a1a2f2e98f8cee"} Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.087392 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f","Type":"ContainerStarted","Data":"410f95a2eff463bae2ec8c4f3f70d04f7c8b72cbc6840e5aba719313c204304d"} Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.090033 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zc5wc" event={"ID":"f455f53a-b378-4366-bb40-4e155e06a6b4","Type":"ContainerStarted","Data":"a15afe4cc950228d4f306475b429262d93db356e0a2e29f5dccc366b0a7d6db2"} Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.090057 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zc5wc" event={"ID":"f455f53a-b378-4366-bb40-4e155e06a6b4","Type":"ContainerStarted","Data":"b84eb434851011542ff7ae9d1ea1986a024f9227f60595459c79325da6c7af0d"} Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.102029 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xdm2h"] Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.103749 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.112471 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xdm2h"] Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.123026 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.140121 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-zc5wc" podStartSLOduration=3.140107128 podStartE2EDuration="3.140107128s" podCreationTimestamp="2025-10-07 19:37:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:07.12335238 +0000 UTC m=+1153.201607991" watchObservedRunningTime="2025-10-07 19:37:07.140107128 +0000 UTC m=+1153.218362739" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.142013 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 07 19:37:07 crc kubenswrapper[4813]: W1007 19:37:07.156099 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85aade8a_78d5_431f_820e_71a6f7023f47.slice/crio-576366bdcddec5316c1f8a61151446b9aa92acfadca66992a3f7e20114c6f57e WatchSource:0}: Error finding container 576366bdcddec5316c1f8a61151446b9aa92acfadca66992a3f7e20114c6f57e: Status 404 returned error can't find the container with id 576366bdcddec5316c1f8a61151446b9aa92acfadca66992a3f7e20114c6f57e Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.158042 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.234297 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-config-data\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.234409 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.234442 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-scripts\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.234492 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkp8f\" (UniqueName: \"kubernetes.io/projected/2272b0c8-98de-47c8-9116-fff8e83fb1b1-kube-api-access-wkp8f\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc 
kubenswrapper[4813]: I1007 19:37:07.251226 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-rjg4f"] Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.336651 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-config-data\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.336754 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.336785 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-scripts\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.336813 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkp8f\" (UniqueName: \"kubernetes.io/projected/2272b0c8-98de-47c8-9116-fff8e83fb1b1-kube-api-access-wkp8f\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.342382 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-config-data\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.348768 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-scripts\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.356762 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.361864 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkp8f\" (UniqueName: \"kubernetes.io/projected/2272b0c8-98de-47c8-9116-fff8e83fb1b1-kube-api-access-wkp8f\") pod \"nova-cell1-conductor-db-sync-xdm2h\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.372246 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:07 crc kubenswrapper[4813]: I1007 19:37:07.902427 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xdm2h"] Oct 07 19:37:07 crc kubenswrapper[4813]: W1007 19:37:07.914524 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2272b0c8_98de_47c8_9116_fff8e83fb1b1.slice/crio-bb75d0c6ac6f19a16456f198c8af10ed6d639ee12fd45544e26a100fe044c5db WatchSource:0}: Error finding container bb75d0c6ac6f19a16456f198c8af10ed6d639ee12fd45544e26a100fe044c5db: Status 404 returned error can't find the container with id bb75d0c6ac6f19a16456f198c8af10ed6d639ee12fd45544e26a100fe044c5db Oct 07 19:37:08 crc kubenswrapper[4813]: I1007 19:37:08.114621 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" event={"ID":"2272b0c8-98de-47c8-9116-fff8e83fb1b1","Type":"ContainerStarted","Data":"bb75d0c6ac6f19a16456f198c8af10ed6d639ee12fd45544e26a100fe044c5db"} Oct 07 19:37:08 crc kubenswrapper[4813]: I1007 19:37:08.116584 4813 generic.go:334] "Generic (PLEG): container finished" podID="4b3b876a-cd89-4a2c-8179-74c000809b17" containerID="7beb608e6dd5b9709919370e458097c003df8852d56ed0182b99e4e14bc81a42" exitCode=0 Oct 07 19:37:08 crc kubenswrapper[4813]: I1007 19:37:08.116622 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" event={"ID":"4b3b876a-cd89-4a2c-8179-74c000809b17","Type":"ContainerDied","Data":"7beb608e6dd5b9709919370e458097c003df8852d56ed0182b99e4e14bc81a42"} Oct 07 19:37:08 crc kubenswrapper[4813]: I1007 19:37:08.116635 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" event={"ID":"4b3b876a-cd89-4a2c-8179-74c000809b17","Type":"ContainerStarted","Data":"635aa09041cb2a05a8227eca9d077ef4ce7bb9be7dfca998f5ba24838f6250c4"} Oct 07 19:37:08 crc kubenswrapper[4813]: I1007 19:37:08.125405 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85aade8a-78d5-431f-820e-71a6f7023f47","Type":"ContainerStarted","Data":"576366bdcddec5316c1f8a61151446b9aa92acfadca66992a3f7e20114c6f57e"} Oct 07 19:37:09 crc kubenswrapper[4813]: I1007 19:37:09.135156 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" event={"ID":"2272b0c8-98de-47c8-9116-fff8e83fb1b1","Type":"ContainerStarted","Data":"5fdcf944836548978a1dc0dafcba17f57f5de9629251d04905344d40867f3a0f"} Oct 07 19:37:09 crc kubenswrapper[4813]: I1007 19:37:09.161912 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" podStartSLOduration=2.161890965 podStartE2EDuration="2.161890965s" podCreationTimestamp="2025-10-07 19:37:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:09.153943865 +0000 UTC m=+1155.232199476" watchObservedRunningTime="2025-10-07 19:37:09.161890965 +0000 UTC m=+1155.240146576" Oct 07 19:37:09 crc kubenswrapper[4813]: I1007 19:37:09.841290 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:09 crc kubenswrapper[4813]: I1007 19:37:09.855775 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:09 crc kubenswrapper[4813]: I1007 
19:37:09.987152 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.175874 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f","Type":"ContainerStarted","Data":"d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af"} Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.177242 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af" gracePeriod=30 Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.189272 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85aade8a-78d5-431f-820e-71a6f7023f47","Type":"ContainerStarted","Data":"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663"} Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.189333 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85aade8a-78d5-431f-820e-71a6f7023f47","Type":"ContainerStarted","Data":"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51"} Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.189791 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" containerName="nova-metadata-log" containerID="cri-o://ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51" gracePeriod=30 Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.189816 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" containerName="nova-metadata-metadata" containerID="cri-o://0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663" gracePeriod=30 Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.193861 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"386bb6a1-9d50-455e-bb53-9979dafceaed","Type":"ContainerStarted","Data":"f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470"} Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.193922 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"386bb6a1-9d50-455e-bb53-9979dafceaed","Type":"ContainerStarted","Data":"5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013"} Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.202104 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.039781219 podStartE2EDuration="7.202088627s" podCreationTimestamp="2025-10-07 19:37:05 +0000 UTC" firstStartedPulling="2025-10-07 19:37:06.573459296 +0000 UTC m=+1152.651714907" lastFinishedPulling="2025-10-07 19:37:10.735766704 +0000 UTC m=+1156.814022315" observedRunningTime="2025-10-07 19:37:12.193976051 +0000 UTC m=+1158.272231672" watchObservedRunningTime="2025-10-07 19:37:12.202088627 +0000 UTC 
m=+1158.280344238" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.203566 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" event={"ID":"4b3b876a-cd89-4a2c-8179-74c000809b17","Type":"ContainerStarted","Data":"8c53e55169f997bee4439bd5317a7b260c203ec4067146e865fdd672e844ec96"} Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.205987 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.207421 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"39885ebe-9384-4710-99d8-d5dedf7f9d0d","Type":"ContainerStarted","Data":"e2fd8969c23a6a6c970eb66c08dbb3d91dca2794dfbd416f28ca46e2c2b4e8f4"} Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.232767 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.653948702 podStartE2EDuration="7.232744788s" podCreationTimestamp="2025-10-07 19:37:05 +0000 UTC" firstStartedPulling="2025-10-07 19:37:07.163743025 +0000 UTC m=+1153.241998636" lastFinishedPulling="2025-10-07 19:37:10.742539111 +0000 UTC m=+1156.820794722" observedRunningTime="2025-10-07 19:37:12.223500289 +0000 UTC m=+1158.301755920" watchObservedRunningTime="2025-10-07 19:37:12.232744788 +0000 UTC m=+1158.311000409" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.271457 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" podStartSLOduration=7.271440113 podStartE2EDuration="7.271440113s" podCreationTimestamp="2025-10-07 19:37:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:12.269665761 +0000 UTC m=+1158.347921372" watchObservedRunningTime="2025-10-07 19:37:12.271440113 +0000 UTC m=+1158.349695724" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.278177 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.373767568 podStartE2EDuration="7.278166388s" podCreationTimestamp="2025-10-07 19:37:05 +0000 UTC" firstStartedPulling="2025-10-07 19:37:06.860520461 +0000 UTC m=+1152.938776072" lastFinishedPulling="2025-10-07 19:37:10.764919281 +0000 UTC m=+1156.843174892" observedRunningTime="2025-10-07 19:37:12.248082164 +0000 UTC m=+1158.326337775" watchObservedRunningTime="2025-10-07 19:37:12.278166388 +0000 UTC m=+1158.356421999" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.287336 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=4.11033198 podStartE2EDuration="8.287297114s" podCreationTimestamp="2025-10-07 19:37:04 +0000 UTC" firstStartedPulling="2025-10-07 19:37:06.585031673 +0000 UTC m=+1152.663287284" lastFinishedPulling="2025-10-07 19:37:10.761996807 +0000 UTC m=+1156.840252418" observedRunningTime="2025-10-07 19:37:12.285296855 +0000 UTC m=+1158.363552466" watchObservedRunningTime="2025-10-07 19:37:12.287297114 +0000 UTC m=+1158.365552725" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.794569 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.962729 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6s2sx\" (UniqueName: \"kubernetes.io/projected/85aade8a-78d5-431f-820e-71a6f7023f47-kube-api-access-6s2sx\") pod \"85aade8a-78d5-431f-820e-71a6f7023f47\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.962817 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85aade8a-78d5-431f-820e-71a6f7023f47-logs\") pod \"85aade8a-78d5-431f-820e-71a6f7023f47\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.963045 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-combined-ca-bundle\") pod \"85aade8a-78d5-431f-820e-71a6f7023f47\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.963133 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-config-data\") pod \"85aade8a-78d5-431f-820e-71a6f7023f47\" (UID: \"85aade8a-78d5-431f-820e-71a6f7023f47\") " Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.963904 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85aade8a-78d5-431f-820e-71a6f7023f47-logs" (OuterVolumeSpecName: "logs") pod "85aade8a-78d5-431f-820e-71a6f7023f47" (UID: "85aade8a-78d5-431f-820e-71a6f7023f47"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:37:12 crc kubenswrapper[4813]: I1007 19:37:12.970575 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85aade8a-78d5-431f-820e-71a6f7023f47-kube-api-access-6s2sx" (OuterVolumeSpecName: "kube-api-access-6s2sx") pod "85aade8a-78d5-431f-820e-71a6f7023f47" (UID: "85aade8a-78d5-431f-820e-71a6f7023f47"). InnerVolumeSpecName "kube-api-access-6s2sx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.010579 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-config-data" (OuterVolumeSpecName: "config-data") pod "85aade8a-78d5-431f-820e-71a6f7023f47" (UID: "85aade8a-78d5-431f-820e-71a6f7023f47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.042138 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85aade8a-78d5-431f-820e-71a6f7023f47" (UID: "85aade8a-78d5-431f-820e-71a6f7023f47"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.065702 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.065789 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6s2sx\" (UniqueName: \"kubernetes.io/projected/85aade8a-78d5-431f-820e-71a6f7023f47-kube-api-access-6s2sx\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.065807 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85aade8a-78d5-431f-820e-71a6f7023f47-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.065819 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85aade8a-78d5-431f-820e-71a6f7023f47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.221261 4813 generic.go:334] "Generic (PLEG): container finished" podID="85aade8a-78d5-431f-820e-71a6f7023f47" containerID="0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663" exitCode=0 Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.221300 4813 generic.go:334] "Generic (PLEG): container finished" podID="85aade8a-78d5-431f-820e-71a6f7023f47" containerID="ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51" exitCode=143 Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.221416 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.221469 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85aade8a-78d5-431f-820e-71a6f7023f47","Type":"ContainerDied","Data":"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663"} Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.221605 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85aade8a-78d5-431f-820e-71a6f7023f47","Type":"ContainerDied","Data":"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51"} Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.221624 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"85aade8a-78d5-431f-820e-71a6f7023f47","Type":"ContainerDied","Data":"576366bdcddec5316c1f8a61151446b9aa92acfadca66992a3f7e20114c6f57e"} Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.221638 4813 scope.go:117] "RemoveContainer" containerID="0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.266878 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.278857 4813 scope.go:117] "RemoveContainer" containerID="ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.282391 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.287671 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:13 crc kubenswrapper[4813]: 
E1007 19:37:13.309013 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" containerName="nova-metadata-log" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.309052 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" containerName="nova-metadata-log" Oct 07 19:37:13 crc kubenswrapper[4813]: E1007 19:37:13.309087 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" containerName="nova-metadata-metadata" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.309097 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" containerName="nova-metadata-metadata" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.309423 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" containerName="nova-metadata-log" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.309445 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" containerName="nova-metadata-metadata" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.310427 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.310512 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.322110 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.322356 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.322477 4813 scope.go:117] "RemoveContainer" containerID="0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663" Oct 07 19:37:13 crc kubenswrapper[4813]: E1007 19:37:13.325605 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663\": container with ID starting with 0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663 not found: ID does not exist" containerID="0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.325646 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663"} err="failed to get container status \"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663\": rpc error: code = NotFound desc = could not find container \"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663\": container with ID starting with 0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663 not found: ID does not exist" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.325673 4813 scope.go:117] "RemoveContainer" containerID="ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51" Oct 07 19:37:13 crc kubenswrapper[4813]: E1007 19:37:13.327405 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51\": container with ID 
starting with ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51 not found: ID does not exist" containerID="ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.327430 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51"} err="failed to get container status \"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51\": rpc error: code = NotFound desc = could not find container \"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51\": container with ID starting with ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51 not found: ID does not exist" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.327445 4813 scope.go:117] "RemoveContainer" containerID="0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.332582 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663"} err="failed to get container status \"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663\": rpc error: code = NotFound desc = could not find container \"0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663\": container with ID starting with 0f4aec824e3b19ce6efee1c4cb8da1c6ec8fc0179c4c93e46dce4547db84e663 not found: ID does not exist" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.332642 4813 scope.go:117] "RemoveContainer" containerID="ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.334701 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51"} err="failed to get container status \"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51\": rpc error: code = NotFound desc = could not find container \"ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51\": container with ID starting with ff07f3c9470213e388b87f7b2ba96edce3b59a37926ad2cedb02411c29211c51 not found: ID does not exist" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.477267 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-config-data\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.477553 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9q2v\" (UniqueName: \"kubernetes.io/projected/63c7097a-f10e-4f19-9d6a-a7936b41824c-kube-api-access-r9q2v\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.477622 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.477828 4813 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.477869 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63c7097a-f10e-4f19-9d6a-a7936b41824c-logs\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.579492 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-config-data\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.579652 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9q2v\" (UniqueName: \"kubernetes.io/projected/63c7097a-f10e-4f19-9d6a-a7936b41824c-kube-api-access-r9q2v\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.579688 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.580103 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.580124 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63c7097a-f10e-4f19-9d6a-a7936b41824c-logs\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.580734 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63c7097a-f10e-4f19-9d6a-a7936b41824c-logs\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.585095 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.585694 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-config-data\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" 
Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.596734 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.598882 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9q2v\" (UniqueName: \"kubernetes.io/projected/63c7097a-f10e-4f19-9d6a-a7936b41824c-kube-api-access-r9q2v\") pod \"nova-metadata-0\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " pod="openstack/nova-metadata-0" Oct 07 19:37:13 crc kubenswrapper[4813]: I1007 19:37:13.643125 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:37:14 crc kubenswrapper[4813]: I1007 19:37:14.106176 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:14 crc kubenswrapper[4813]: I1007 19:37:14.240785 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63c7097a-f10e-4f19-9d6a-a7936b41824c","Type":"ContainerStarted","Data":"5a2bf256bea9e2f6088dee07d79c9fe0e830d0096ae5aeb50612baffaebaa2ee"} Oct 07 19:37:14 crc kubenswrapper[4813]: I1007 19:37:14.620483 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85aade8a-78d5-431f-820e-71a6f7023f47" path="/var/lib/kubelet/pods/85aade8a-78d5-431f-820e-71a6f7023f47/volumes" Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.253533 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63c7097a-f10e-4f19-9d6a-a7936b41824c","Type":"ContainerStarted","Data":"d94c4726260f85715c36f619a77a98ca757042114cad743a0369c33a03d41c64"} Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.255504 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63c7097a-f10e-4f19-9d6a-a7936b41824c","Type":"ContainerStarted","Data":"1016f580b235500e6a79bb87ee6d97e46cba89703e5e6fea35fcb26f512ea56f"} Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.285829 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.285804922 podStartE2EDuration="2.285804922s" podCreationTimestamp="2025-10-07 19:37:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:15.278123849 +0000 UTC m=+1161.356379460" watchObservedRunningTime="2025-10-07 19:37:15.285804922 +0000 UTC m=+1161.364060533" Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.375601 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.375871 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.439783 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.695946 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.943608 4813 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 19:37:15 crc kubenswrapper[4813]: I1007 19:37:15.943656 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.103968 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.178184 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-5qnsr"] Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.178507 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" podUID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerName="dnsmasq-dns" containerID="cri-o://c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca" gracePeriod=10 Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.280621 4813 generic.go:334] "Generic (PLEG): container finished" podID="f455f53a-b378-4366-bb40-4e155e06a6b4" containerID="a15afe4cc950228d4f306475b429262d93db356e0a2e29f5dccc366b0a7d6db2" exitCode=0 Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.281799 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zc5wc" event={"ID":"f455f53a-b378-4366-bb40-4e155e06a6b4","Type":"ContainerDied","Data":"a15afe4cc950228d4f306475b429262d93db356e0a2e29f5dccc366b0a7d6db2"} Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.374617 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.808427 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.957642 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-sb\") pod \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.957702 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-swift-storage-0\") pod \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.957757 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-nb\") pod \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.958474 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-config\") pod \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.958537 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-svc\") pod \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.958629 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngmdc\" (UniqueName: \"kubernetes.io/projected/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-kube-api-access-ngmdc\") pod \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\" (UID: \"2c6518c9-a69b-4270-8fd8-a7f55eacfce7\") " Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.967566 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-kube-api-access-ngmdc" (OuterVolumeSpecName: "kube-api-access-ngmdc") pod "2c6518c9-a69b-4270-8fd8-a7f55eacfce7" (UID: "2c6518c9-a69b-4270-8fd8-a7f55eacfce7"). InnerVolumeSpecName "kube-api-access-ngmdc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.984591 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 19:37:16 crc kubenswrapper[4813]: I1007 19:37:16.985219 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.055749 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2c6518c9-a69b-4270-8fd8-a7f55eacfce7" (UID: "2c6518c9-a69b-4270-8fd8-a7f55eacfce7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.062160 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngmdc\" (UniqueName: \"kubernetes.io/projected/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-kube-api-access-ngmdc\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.062190 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.062350 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c6518c9-a69b-4270-8fd8-a7f55eacfce7" (UID: "2c6518c9-a69b-4270-8fd8-a7f55eacfce7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.081985 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-config" (OuterVolumeSpecName: "config") pod "2c6518c9-a69b-4270-8fd8-a7f55eacfce7" (UID: "2c6518c9-a69b-4270-8fd8-a7f55eacfce7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.089058 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2c6518c9-a69b-4270-8fd8-a7f55eacfce7" (UID: "2c6518c9-a69b-4270-8fd8-a7f55eacfce7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.095291 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2c6518c9-a69b-4270-8fd8-a7f55eacfce7" (UID: "2c6518c9-a69b-4270-8fd8-a7f55eacfce7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.166112 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.166151 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.166162 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.166171 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c6518c9-a69b-4270-8fd8-a7f55eacfce7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.292371 4813 generic.go:334] "Generic (PLEG): container finished" podID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerID="c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca" exitCode=0 Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.292449 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.292459 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" event={"ID":"2c6518c9-a69b-4270-8fd8-a7f55eacfce7","Type":"ContainerDied","Data":"c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca"} Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.293493 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" event={"ID":"2c6518c9-a69b-4270-8fd8-a7f55eacfce7","Type":"ContainerDied","Data":"d48f1a13549cf87ef169897a69c6f6d7b84419bea587188a0c74289c9b15e173"} Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.293522 4813 scope.go:117] "RemoveContainer" containerID="c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.387314 4813 scope.go:117] "RemoveContainer" containerID="abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.413706 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-5qnsr"] Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.439565 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-5qnsr"] Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.451775 4813 scope.go:117] "RemoveContainer" containerID="c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca" Oct 07 19:37:17 crc kubenswrapper[4813]: E1007 19:37:17.453514 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca\": container with ID starting with c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca not found: ID does not exist" containerID="c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 
19:37:17.453581 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca"} err="failed to get container status \"c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca\": rpc error: code = NotFound desc = could not find container \"c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca\": container with ID starting with c7bc2ca34632798d7f10ce97ed1b7e2914b5204e5a23b6a25345cf46604482ca not found: ID does not exist" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.453618 4813 scope.go:117] "RemoveContainer" containerID="abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59" Oct 07 19:37:17 crc kubenswrapper[4813]: E1007 19:37:17.457104 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59\": container with ID starting with abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59 not found: ID does not exist" containerID="abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.457284 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59"} err="failed to get container status \"abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59\": rpc error: code = NotFound desc = could not find container \"abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59\": container with ID starting with abba73818d013f2d773d820ea73a91be17bb161985e588120f29dc6c866aeb59 not found: ID does not exist" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.789762 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.880919 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-config-data\") pod \"f455f53a-b378-4366-bb40-4e155e06a6b4\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.881082 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-scripts\") pod \"f455f53a-b378-4366-bb40-4e155e06a6b4\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.881127 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-combined-ca-bundle\") pod \"f455f53a-b378-4366-bb40-4e155e06a6b4\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.881159 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6g5f\" (UniqueName: \"kubernetes.io/projected/f455f53a-b378-4366-bb40-4e155e06a6b4-kube-api-access-c6g5f\") pod \"f455f53a-b378-4366-bb40-4e155e06a6b4\" (UID: \"f455f53a-b378-4366-bb40-4e155e06a6b4\") " Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.890569 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-scripts" (OuterVolumeSpecName: "scripts") pod "f455f53a-b378-4366-bb40-4e155e06a6b4" (UID: "f455f53a-b378-4366-bb40-4e155e06a6b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.896587 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f455f53a-b378-4366-bb40-4e155e06a6b4-kube-api-access-c6g5f" (OuterVolumeSpecName: "kube-api-access-c6g5f") pod "f455f53a-b378-4366-bb40-4e155e06a6b4" (UID: "f455f53a-b378-4366-bb40-4e155e06a6b4"). InnerVolumeSpecName "kube-api-access-c6g5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.914442 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f455f53a-b378-4366-bb40-4e155e06a6b4" (UID: "f455f53a-b378-4366-bb40-4e155e06a6b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.951471 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-config-data" (OuterVolumeSpecName: "config-data") pod "f455f53a-b378-4366-bb40-4e155e06a6b4" (UID: "f455f53a-b378-4366-bb40-4e155e06a6b4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.982377 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.982418 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.982430 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6g5f\" (UniqueName: \"kubernetes.io/projected/f455f53a-b378-4366-bb40-4e155e06a6b4-kube-api-access-c6g5f\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:17 crc kubenswrapper[4813]: I1007 19:37:17.982439 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f455f53a-b378-4366-bb40-4e155e06a6b4-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.307833 4813 generic.go:334] "Generic (PLEG): container finished" podID="2272b0c8-98de-47c8-9116-fff8e83fb1b1" containerID="5fdcf944836548978a1dc0dafcba17f57f5de9629251d04905344d40867f3a0f" exitCode=0 Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.308268 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" event={"ID":"2272b0c8-98de-47c8-9116-fff8e83fb1b1","Type":"ContainerDied","Data":"5fdcf944836548978a1dc0dafcba17f57f5de9629251d04905344d40867f3a0f"} Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.311078 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zc5wc" event={"ID":"f455f53a-b378-4366-bb40-4e155e06a6b4","Type":"ContainerDied","Data":"b84eb434851011542ff7ae9d1ea1986a024f9227f60595459c79325da6c7af0d"} Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.311133 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b84eb434851011542ff7ae9d1ea1986a024f9227f60595459c79325da6c7af0d" Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.311223 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zc5wc" Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.512205 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.512632 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-api" containerID="cri-o://f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470" gracePeriod=30 Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.512495 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-log" containerID="cri-o://5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013" gracePeriod=30 Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.528975 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.529179 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="39885ebe-9384-4710-99d8-d5dedf7f9d0d" containerName="nova-scheduler-scheduler" containerID="cri-o://e2fd8969c23a6a6c970eb66c08dbb3d91dca2794dfbd416f28ca46e2c2b4e8f4" gracePeriod=30 Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.576803 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.577117 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerName="nova-metadata-log" containerID="cri-o://1016f580b235500e6a79bb87ee6d97e46cba89703e5e6fea35fcb26f512ea56f" gracePeriod=30 Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.577191 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerName="nova-metadata-metadata" containerID="cri-o://d94c4726260f85715c36f619a77a98ca757042114cad743a0369c33a03d41c64" gracePeriod=30 Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.615180 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" path="/var/lib/kubelet/pods/2c6518c9-a69b-4270-8fd8-a7f55eacfce7/volumes" Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.643397 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 19:37:18 crc kubenswrapper[4813]: I1007 19:37:18.643480 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.383312 4813 generic.go:334] "Generic (PLEG): container finished" podID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerID="5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013" exitCode=143 Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.383569 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"386bb6a1-9d50-455e-bb53-9979dafceaed","Type":"ContainerDied","Data":"5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013"} Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.400562 4813 generic.go:334] "Generic (PLEG): container finished" 
podID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerID="d94c4726260f85715c36f619a77a98ca757042114cad743a0369c33a03d41c64" exitCode=0 Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.400590 4813 generic.go:334] "Generic (PLEG): container finished" podID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerID="1016f580b235500e6a79bb87ee6d97e46cba89703e5e6fea35fcb26f512ea56f" exitCode=143 Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.400671 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63c7097a-f10e-4f19-9d6a-a7936b41824c","Type":"ContainerDied","Data":"d94c4726260f85715c36f619a77a98ca757042114cad743a0369c33a03d41c64"} Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.400718 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63c7097a-f10e-4f19-9d6a-a7936b41824c","Type":"ContainerDied","Data":"1016f580b235500e6a79bb87ee6d97e46cba89703e5e6fea35fcb26f512ea56f"} Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.692229 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.726641 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9q2v\" (UniqueName: \"kubernetes.io/projected/63c7097a-f10e-4f19-9d6a-a7936b41824c-kube-api-access-r9q2v\") pod \"63c7097a-f10e-4f19-9d6a-a7936b41824c\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.726741 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-nova-metadata-tls-certs\") pod \"63c7097a-f10e-4f19-9d6a-a7936b41824c\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.726793 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-combined-ca-bundle\") pod \"63c7097a-f10e-4f19-9d6a-a7936b41824c\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.726849 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-config-data\") pod \"63c7097a-f10e-4f19-9d6a-a7936b41824c\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.726910 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63c7097a-f10e-4f19-9d6a-a7936b41824c-logs\") pod \"63c7097a-f10e-4f19-9d6a-a7936b41824c\" (UID: \"63c7097a-f10e-4f19-9d6a-a7936b41824c\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.728182 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63c7097a-f10e-4f19-9d6a-a7936b41824c-logs" (OuterVolumeSpecName: "logs") pod "63c7097a-f10e-4f19-9d6a-a7936b41824c" (UID: "63c7097a-f10e-4f19-9d6a-a7936b41824c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.748556 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63c7097a-f10e-4f19-9d6a-a7936b41824c-kube-api-access-r9q2v" (OuterVolumeSpecName: "kube-api-access-r9q2v") pod "63c7097a-f10e-4f19-9d6a-a7936b41824c" (UID: "63c7097a-f10e-4f19-9d6a-a7936b41824c"). InnerVolumeSpecName "kube-api-access-r9q2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.779518 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63c7097a-f10e-4f19-9d6a-a7936b41824c" (UID: "63c7097a-f10e-4f19-9d6a-a7936b41824c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.786458 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-config-data" (OuterVolumeSpecName: "config-data") pod "63c7097a-f10e-4f19-9d6a-a7936b41824c" (UID: "63c7097a-f10e-4f19-9d6a-a7936b41824c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.834090 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.834117 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.834126 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63c7097a-f10e-4f19-9d6a-a7936b41824c-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.834135 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9q2v\" (UniqueName: \"kubernetes.io/projected/63c7097a-f10e-4f19-9d6a-a7936b41824c-kube-api-access-r9q2v\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.834927 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "63c7097a-f10e-4f19-9d6a-a7936b41824c" (UID: "63c7097a-f10e-4f19-9d6a-a7936b41824c"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.867503 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.935087 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-combined-ca-bundle\") pod \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.935404 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-config-data\") pod \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.935497 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkp8f\" (UniqueName: \"kubernetes.io/projected/2272b0c8-98de-47c8-9116-fff8e83fb1b1-kube-api-access-wkp8f\") pod \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.935573 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-scripts\") pod \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\" (UID: \"2272b0c8-98de-47c8-9116-fff8e83fb1b1\") " Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.935878 4813 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/63c7097a-f10e-4f19-9d6a-a7936b41824c-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.941478 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2272b0c8-98de-47c8-9116-fff8e83fb1b1-kube-api-access-wkp8f" (OuterVolumeSpecName: "kube-api-access-wkp8f") pod "2272b0c8-98de-47c8-9116-fff8e83fb1b1" (UID: "2272b0c8-98de-47c8-9116-fff8e83fb1b1"). InnerVolumeSpecName "kube-api-access-wkp8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.941784 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-scripts" (OuterVolumeSpecName: "scripts") pod "2272b0c8-98de-47c8-9116-fff8e83fb1b1" (UID: "2272b0c8-98de-47c8-9116-fff8e83fb1b1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.963573 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2272b0c8-98de-47c8-9116-fff8e83fb1b1" (UID: "2272b0c8-98de-47c8-9116-fff8e83fb1b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.963655 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-config-data" (OuterVolumeSpecName: "config-data") pod "2272b0c8-98de-47c8-9116-fff8e83fb1b1" (UID: "2272b0c8-98de-47c8-9116-fff8e83fb1b1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.986828 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-688984b46d-g79nd" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 07 19:37:19 crc kubenswrapper[4813]: I1007 19:37:19.986939 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-688984b46d-g79nd" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.036759 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.036792 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.036803 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2272b0c8-98de-47c8-9116-fff8e83fb1b1-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.036811 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkp8f\" (UniqueName: \"kubernetes.io/projected/2272b0c8-98de-47c8-9116-fff8e83fb1b1-kube-api-access-wkp8f\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.379656 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e2fd8969c23a6a6c970eb66c08dbb3d91dca2794dfbd416f28ca46e2c2b4e8f4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.385621 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e2fd8969c23a6a6c970eb66c08dbb3d91dca2794dfbd416f28ca46e2c2b4e8f4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.387400 4813 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e2fd8969c23a6a6c970eb66c08dbb3d91dca2794dfbd416f28ca46e2c2b4e8f4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.387477 4813 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="39885ebe-9384-4710-99d8-d5dedf7f9d0d" containerName="nova-scheduler-scheduler" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.426413 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" 
event={"ID":"2272b0c8-98de-47c8-9116-fff8e83fb1b1","Type":"ContainerDied","Data":"bb75d0c6ac6f19a16456f198c8af10ed6d639ee12fd45544e26a100fe044c5db"} Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.426460 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb75d0c6ac6f19a16456f198c8af10ed6d639ee12fd45544e26a100fe044c5db" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.426538 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-xdm2h" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.437197 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"63c7097a-f10e-4f19-9d6a-a7936b41824c","Type":"ContainerDied","Data":"5a2bf256bea9e2f6088dee07d79c9fe0e830d0096ae5aeb50612baffaebaa2ee"} Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.437272 4813 scope.go:117] "RemoveContainer" containerID="d94c4726260f85715c36f619a77a98ca757042114cad743a0369c33a03d41c64" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.437495 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.457733 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.458382 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerName="nova-metadata-metadata" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458409 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerName="nova-metadata-metadata" Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.458425 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerName="dnsmasq-dns" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458436 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerName="dnsmasq-dns" Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.458463 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2272b0c8-98de-47c8-9116-fff8e83fb1b1" containerName="nova-cell1-conductor-db-sync" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458472 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2272b0c8-98de-47c8-9116-fff8e83fb1b1" containerName="nova-cell1-conductor-db-sync" Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.458482 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerName="nova-metadata-log" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458490 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerName="nova-metadata-log" Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.458507 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerName="init" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458515 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerName="init" Oct 07 19:37:20 crc kubenswrapper[4813]: E1007 19:37:20.458582 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f455f53a-b378-4366-bb40-4e155e06a6b4" 
containerName="nova-manage" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458594 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f455f53a-b378-4366-bb40-4e155e06a6b4" containerName="nova-manage" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458816 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerName="dnsmasq-dns" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458833 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerName="nova-metadata-log" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458849 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f455f53a-b378-4366-bb40-4e155e06a6b4" containerName="nova-manage" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458866 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" containerName="nova-metadata-metadata" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.458887 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="2272b0c8-98de-47c8-9116-fff8e83fb1b1" containerName="nova-cell1-conductor-db-sync" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.459782 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.462454 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.500649 4813 scope.go:117] "RemoveContainer" containerID="1016f580b235500e6a79bb87ee6d97e46cba89703e5e6fea35fcb26f512ea56f" Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.523230 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.568599 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.581929 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.589516 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.591624 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.593571 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.594899 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.616544 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63c7097a-f10e-4f19-9d6a-a7936b41824c" path="/var/lib/kubelet/pods/63c7097a-f10e-4f19-9d6a-a7936b41824c/volumes"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.617265 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.654004 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2536ca60-1d39-40b9-a15b-708804ec9fa5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.654225 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg65r\" (UniqueName: \"kubernetes.io/projected/2536ca60-1d39-40b9-a15b-708804ec9fa5-kube-api-access-sg65r\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.654573 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2536ca60-1d39-40b9-a15b-708804ec9fa5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.756863 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2536ca60-1d39-40b9-a15b-708804ec9fa5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.756940 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.757023 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.757092 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2536ca60-1d39-40b9-a15b-708804ec9fa5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.757276 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21847ab9-4438-4203-ba7e-de3231e5e3ba-logs\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.757417 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57wk5\" (UniqueName: \"kubernetes.io/projected/21847ab9-4438-4203-ba7e-de3231e5e3ba-kube-api-access-57wk5\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.757464 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg65r\" (UniqueName: \"kubernetes.io/projected/2536ca60-1d39-40b9-a15b-708804ec9fa5-kube-api-access-sg65r\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.757523 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-config-data\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.765269 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2536ca60-1d39-40b9-a15b-708804ec9fa5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.765392 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2536ca60-1d39-40b9-a15b-708804ec9fa5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.778014 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg65r\" (UniqueName: \"kubernetes.io/projected/2536ca60-1d39-40b9-a15b-708804ec9fa5-kube-api-access-sg65r\") pod \"nova-cell1-conductor-0\" (UID: \"2536ca60-1d39-40b9-a15b-708804ec9fa5\") " pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.841443 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.860094 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.860260 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21847ab9-4438-4203-ba7e-de3231e5e3ba-logs\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.860357 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57wk5\" (UniqueName: \"kubernetes.io/projected/21847ab9-4438-4203-ba7e-de3231e5e3ba-kube-api-access-57wk5\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.860425 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-config-data\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.860559 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.860687 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21847ab9-4438-4203-ba7e-de3231e5e3ba-logs\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.866938 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.867917 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-config-data\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.875959 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.892259 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57wk5\" (UniqueName: \"kubernetes.io/projected/21847ab9-4438-4203-ba7e-de3231e5e3ba-kube-api-access-57wk5\") pod \"nova-metadata-0\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " pod="openstack/nova-metadata-0"
Oct 07 19:37:20 crc kubenswrapper[4813]: I1007 19:37:20.951395 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 07 19:37:21 crc kubenswrapper[4813]: I1007 19:37:21.382101 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 07 19:37:21 crc kubenswrapper[4813]: W1007 19:37:21.383959 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2536ca60_1d39_40b9_a15b_708804ec9fa5.slice/crio-552103ccdb67aca6e0d7b423a5d7143ec47a051a55effc545b325a9cc1a50af3 WatchSource:0}: Error finding container 552103ccdb67aca6e0d7b423a5d7143ec47a051a55effc545b325a9cc1a50af3: Status 404 returned error can't find the container with id 552103ccdb67aca6e0d7b423a5d7143ec47a051a55effc545b325a9cc1a50af3
Oct 07 19:37:21 crc kubenswrapper[4813]: I1007 19:37:21.462944 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 07 19:37:21 crc kubenswrapper[4813]: I1007 19:37:21.463216 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2536ca60-1d39-40b9-a15b-708804ec9fa5","Type":"ContainerStarted","Data":"552103ccdb67aca6e0d7b423a5d7143ec47a051a55effc545b325a9cc1a50af3"}
Oct 07 19:37:21 crc kubenswrapper[4813]: W1007 19:37:21.471715 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21847ab9_4438_4203_ba7e_de3231e5e3ba.slice/crio-8f178d168dfa4bae3575d39fc22e958df2a386315028b2dce134cb41bdc65968 WatchSource:0}: Error finding container 8f178d168dfa4bae3575d39fc22e958df2a386315028b2dce134cb41bdc65968: Status 404 returned error can't find the container with id 8f178d168dfa4bae3575d39fc22e958df2a386315028b2dce134cb41bdc65968
Oct 07 19:37:21 crc kubenswrapper[4813]: I1007 19:37:21.521183 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 07 19:37:21 crc kubenswrapper[4813]: I1007 19:37:21.621952 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5784cf869f-5qnsr" podUID="2c6518c9-a69b-4270-8fd8-a7f55eacfce7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.166:5353: i/o timeout"
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.079057 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.079443 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.079778 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf"
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.080695 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0f4da7fd23d52ded39cf69b0faa3801bac77bdff2643678a6b8540c579041a59"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.080772 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://0f4da7fd23d52ded39cf69b0faa3801bac77bdff2643678a6b8540c579041a59" gracePeriod=600
Oct 07 19:37:22 crc kubenswrapper[4813]: E1007 19:37:22.189124 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod537f8a53_dde4_4808_a822_9d8c922a8499.slice/crio-conmon-0f4da7fd23d52ded39cf69b0faa3801bac77bdff2643678a6b8540c579041a59.scope\": RecentStats: unable to find data in memory cache]"
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.481753 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21847ab9-4438-4203-ba7e-de3231e5e3ba","Type":"ContainerStarted","Data":"3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e"}
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.483717 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21847ab9-4438-4203-ba7e-de3231e5e3ba","Type":"ContainerStarted","Data":"721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327"}
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.483769 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21847ab9-4438-4203-ba7e-de3231e5e3ba","Type":"ContainerStarted","Data":"8f178d168dfa4bae3575d39fc22e958df2a386315028b2dce134cb41bdc65968"}
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.487216 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"2536ca60-1d39-40b9-a15b-708804ec9fa5","Type":"ContainerStarted","Data":"49c234403a1634cc7d82ac60e876a9bcd5bf245d5829fc04620fb36fdf478e62"}
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.487589 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.492552 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="0f4da7fd23d52ded39cf69b0faa3801bac77bdff2643678a6b8540c579041a59" exitCode=0
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.492578 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"0f4da7fd23d52ded39cf69b0faa3801bac77bdff2643678a6b8540c579041a59"}
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.492607 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"a168d0f1d4ea1589207def16c70ca26d39123d2f686ab970a58e0248c2c0905b"}
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.492625 4813 scope.go:117] "RemoveContainer" containerID="c4e3a874402bcde4b4b4d8190142ef2959a5d27f6fb1ca4f9803d48de7b2c187"
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.509665 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.509638441 podStartE2EDuration="2.509638441s" podCreationTimestamp="2025-10-07 19:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:22.508187479 +0000 UTC m=+1168.586443090" watchObservedRunningTime="2025-10-07 19:37:22.509638441 +0000 UTC m=+1168.587894092"
Oct 07 19:37:22 crc kubenswrapper[4813]: I1007 19:37:22.540308 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.540282863 podStartE2EDuration="2.540282863s" podCreationTimestamp="2025-10-07 19:37:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:22.529949212 +0000 UTC m=+1168.608204863" watchObservedRunningTime="2025-10-07 19:37:22.540282863 +0000 UTC m=+1168.618538494"
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.506714 4813 generic.go:334] "Generic (PLEG): container finished" podID="39885ebe-9384-4710-99d8-d5dedf7f9d0d" containerID="e2fd8969c23a6a6c970eb66c08dbb3d91dca2794dfbd416f28ca46e2c2b4e8f4" exitCode=0
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.506818 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"39885ebe-9384-4710-99d8-d5dedf7f9d0d","Type":"ContainerDied","Data":"e2fd8969c23a6a6c970eb66c08dbb3d91dca2794dfbd416f28ca46e2c2b4e8f4"}
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.507588 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"39885ebe-9384-4710-99d8-d5dedf7f9d0d","Type":"ContainerDied","Data":"8a222b5c8e836e7097d52cc783439ec57a0d9ec3a98ddbe0e4a1a2f2e98f8cee"}
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.507607 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a222b5c8e836e7097d52cc783439ec57a0d9ec3a98ddbe0e4a1a2f2e98f8cee"
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.564603 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.744612 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-config-data\") pod \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") "
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.744777 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgbw9\" (UniqueName: \"kubernetes.io/projected/39885ebe-9384-4710-99d8-d5dedf7f9d0d-kube-api-access-wgbw9\") pod \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") "
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.744871 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-combined-ca-bundle\") pod \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\" (UID: \"39885ebe-9384-4710-99d8-d5dedf7f9d0d\") "
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.771018 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39885ebe-9384-4710-99d8-d5dedf7f9d0d-kube-api-access-wgbw9" (OuterVolumeSpecName: "kube-api-access-wgbw9") pod "39885ebe-9384-4710-99d8-d5dedf7f9d0d" (UID: "39885ebe-9384-4710-99d8-d5dedf7f9d0d"). InnerVolumeSpecName "kube-api-access-wgbw9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.783163 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39885ebe-9384-4710-99d8-d5dedf7f9d0d" (UID: "39885ebe-9384-4710-99d8-d5dedf7f9d0d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.798016 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-config-data" (OuterVolumeSpecName: "config-data") pod "39885ebe-9384-4710-99d8-d5dedf7f9d0d" (UID: "39885ebe-9384-4710-99d8-d5dedf7f9d0d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.847715 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.847746 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgbw9\" (UniqueName: \"kubernetes.io/projected/39885ebe-9384-4710-99d8-d5dedf7f9d0d-kube-api-access-wgbw9\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:23 crc kubenswrapper[4813]: I1007 19:37:23.847759 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39885ebe-9384-4710-99d8-d5dedf7f9d0d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.420007 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.517368 4813 generic.go:334] "Generic (PLEG): container finished" podID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerID="f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470" exitCode=0
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.517443 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.517653 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.518685 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"386bb6a1-9d50-455e-bb53-9979dafceaed","Type":"ContainerDied","Data":"f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470"}
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.518747 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"386bb6a1-9d50-455e-bb53-9979dafceaed","Type":"ContainerDied","Data":"cf5fa6d2acd5982dfee43286d66adb0ffdde3ad7a7ce1f60e5ad8c369ee2bc7b"}
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.518764 4813 scope.go:117] "RemoveContainer" containerID="f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.539342 4813 scope.go:117] "RemoveContainer" containerID="5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.552167 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.558055 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/386bb6a1-9d50-455e-bb53-9979dafceaed-logs\") pod \"386bb6a1-9d50-455e-bb53-9979dafceaed\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") "
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.558468 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-config-data\") pod \"386bb6a1-9d50-455e-bb53-9979dafceaed\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") "
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.558652 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr86l\" (UniqueName: \"kubernetes.io/projected/386bb6a1-9d50-455e-bb53-9979dafceaed-kube-api-access-rr86l\") pod \"386bb6a1-9d50-455e-bb53-9979dafceaed\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") "
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.558718 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/386bb6a1-9d50-455e-bb53-9979dafceaed-logs" (OuterVolumeSpecName: "logs") pod "386bb6a1-9d50-455e-bb53-9979dafceaed" (UID: "386bb6a1-9d50-455e-bb53-9979dafceaed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.558896 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-combined-ca-bundle\") pod \"386bb6a1-9d50-455e-bb53-9979dafceaed\" (UID: \"386bb6a1-9d50-455e-bb53-9979dafceaed\") "
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.559313 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/386bb6a1-9d50-455e-bb53-9979dafceaed-logs\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.561279 4813 scope.go:117] "RemoveContainer" containerID="f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470"
Oct 07 19:37:24 crc kubenswrapper[4813]: E1007 19:37:24.566455 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470\": container with ID starting with f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470 not found: ID does not exist" containerID="f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.566495 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470"} err="failed to get container status \"f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470\": rpc error: code = NotFound desc = could not find container \"f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470\": container with ID starting with f9e73b9a17873baf478047cb7aeb7a961d50bca16f2748c06228467605db6470 not found: ID does not exist"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.566531 4813 scope.go:117] "RemoveContainer" containerID="5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013"
Oct 07 19:37:24 crc kubenswrapper[4813]: E1007 19:37:24.566891 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013\": container with ID starting with 5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013 not found: ID does not exist" containerID="5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.566924 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013"} err="failed to get container status \"5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013\": rpc error: code = NotFound desc = could not find container \"5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013\": container with ID starting with 5fa600aef82a0da386541c6d8669fc566b5fa1b99d0b6901f7465e4c9f3c3013 not found: ID does not exist"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.580560 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/386bb6a1-9d50-455e-bb53-9979dafceaed-kube-api-access-rr86l" (OuterVolumeSpecName: "kube-api-access-rr86l") pod "386bb6a1-9d50-455e-bb53-9979dafceaed" (UID: "386bb6a1-9d50-455e-bb53-9979dafceaed"). InnerVolumeSpecName "kube-api-access-rr86l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.581009 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.595170 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 07 19:37:24 crc kubenswrapper[4813]: E1007 19:37:24.595556 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-log"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.595572 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-log"
Oct 07 19:37:24 crc kubenswrapper[4813]: E1007 19:37:24.595614 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-api"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.595621 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-api"
Oct 07 19:37:24 crc kubenswrapper[4813]: E1007 19:37:24.595629 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39885ebe-9384-4710-99d8-d5dedf7f9d0d" containerName="nova-scheduler-scheduler"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.595636 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="39885ebe-9384-4710-99d8-d5dedf7f9d0d" containerName="nova-scheduler-scheduler"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.596689 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-log"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.596723 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="39885ebe-9384-4710-99d8-d5dedf7f9d0d" containerName="nova-scheduler-scheduler"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.596734 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" containerName="nova-api-api"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.597621 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.610177 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.625622 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39885ebe-9384-4710-99d8-d5dedf7f9d0d" path="/var/lib/kubelet/pods/39885ebe-9384-4710-99d8-d5dedf7f9d0d/volumes"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.628706 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.641194 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "386bb6a1-9d50-455e-bb53-9979dafceaed" (UID: "386bb6a1-9d50-455e-bb53-9979dafceaed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.641591 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-config-data" (OuterVolumeSpecName: "config-data") pod "386bb6a1-9d50-455e-bb53-9979dafceaed" (UID: "386bb6a1-9d50-455e-bb53-9979dafceaed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.660975 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.661295 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/386bb6a1-9d50-455e-bb53-9979dafceaed-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.661314 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr86l\" (UniqueName: \"kubernetes.io/projected/386bb6a1-9d50-455e-bb53-9979dafceaed-kube-api-access-rr86l\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.763249 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb8mr\" (UniqueName: \"kubernetes.io/projected/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-kube-api-access-gb8mr\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.763311 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.763445 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-config-data\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.848244 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.858966 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.869010 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb8mr\" (UniqueName: \"kubernetes.io/projected/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-kube-api-access-gb8mr\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.869103 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.869230 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-config-data\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.870386 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.871945 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.873851 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.877095 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.890671 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-config-data\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.890756 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb8mr\" (UniqueName: \"kubernetes.io/projected/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-kube-api-access-gb8mr\") pod \"nova-scheduler-0\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " pod="openstack/nova-scheduler-0"
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.894578 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 07 19:37:24 crc kubenswrapper[4813]: I1007 19:37:24.936758 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.072812 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7wcx\" (UniqueName: \"kubernetes.io/projected/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-kube-api-access-w7wcx\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.073702 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-config-data\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.073730 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-logs\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.073770 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.175071 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-config-data\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.175117 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-logs\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.175157 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.175305 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7wcx\" (UniqueName: \"kubernetes.io/projected/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-kube-api-access-w7wcx\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.177030 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-logs\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.184491 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-config-data\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.191062 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.196375 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7wcx\" (UniqueName: \"kubernetes.io/projected/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-kube-api-access-w7wcx\") pod \"nova-api-0\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.218370 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.353262 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.535476 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b192b8d-45cf-450f-a67d-a6f9b1a8326a","Type":"ContainerStarted","Data":"f186e0ec0ea8e5bcaa7770645837e872ace89680da3043803ae992e80d70793f"}
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.536128 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b192b8d-45cf-450f-a67d-a6f9b1a8326a","Type":"ContainerStarted","Data":"588682a67ff6eaa53211c684530bbc264cd37a57d0b66e19daa76e54ee502331"}
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.570707 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.570684737 podStartE2EDuration="1.570684737s" podCreationTimestamp="2025-10-07 19:37:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:25.552289642 +0000 UTC m=+1171.630545263" watchObservedRunningTime="2025-10-07 19:37:25.570684737 +0000 UTC m=+1171.648940348"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.848189 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.848413 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="9e7e2083-472f-4551-840c-35943117bcb0" containerName="kube-state-metrics" containerID="cri-o://49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777" gracePeriod=30
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.879816 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.952385 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 07 19:37:25 crc kubenswrapper[4813]: I1007 19:37:25.952524 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.293563 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-688984b46d-g79nd"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.310580 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.401923 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-tls-certs\") pod \"aed6b0b2-d265-4f3f-a68b-215696e44617\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") "
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.402227 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-secret-key\") pod \"aed6b0b2-d265-4f3f-a68b-215696e44617\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") "
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.402279 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aed6b0b2-d265-4f3f-a68b-215696e44617-logs\") pod \"aed6b0b2-d265-4f3f-a68b-215696e44617\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") "
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.402379 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-config-data\") pod \"aed6b0b2-d265-4f3f-a68b-215696e44617\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") "
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.402499 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-combined-ca-bundle\") pod \"aed6b0b2-d265-4f3f-a68b-215696e44617\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") "
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.402572 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-scripts\") pod \"aed6b0b2-d265-4f3f-a68b-215696e44617\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") "
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.402606 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpvgv\" (UniqueName: \"kubernetes.io/projected/aed6b0b2-d265-4f3f-a68b-215696e44617-kube-api-access-fpvgv\") pod \"aed6b0b2-d265-4f3f-a68b-215696e44617\" (UID: \"aed6b0b2-d265-4f3f-a68b-215696e44617\") "
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.403002 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aed6b0b2-d265-4f3f-a68b-215696e44617-logs" (OuterVolumeSpecName: "logs") pod "aed6b0b2-d265-4f3f-a68b-215696e44617" (UID: "aed6b0b2-d265-4f3f-a68b-215696e44617"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.405225 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "aed6b0b2-d265-4f3f-a68b-215696e44617" (UID: "aed6b0b2-d265-4f3f-a68b-215696e44617"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.410201 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aed6b0b2-d265-4f3f-a68b-215696e44617-kube-api-access-fpvgv" (OuterVolumeSpecName: "kube-api-access-fpvgv") pod "aed6b0b2-d265-4f3f-a68b-215696e44617" (UID: "aed6b0b2-d265-4f3f-a68b-215696e44617"). InnerVolumeSpecName "kube-api-access-fpvgv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.439613 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-scripts" (OuterVolumeSpecName: "scripts") pod "aed6b0b2-d265-4f3f-a68b-215696e44617" (UID: "aed6b0b2-d265-4f3f-a68b-215696e44617"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.442167 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-config-data" (OuterVolumeSpecName: "config-data") pod "aed6b0b2-d265-4f3f-a68b-215696e44617" (UID: "aed6b0b2-d265-4f3f-a68b-215696e44617"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.452468 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aed6b0b2-d265-4f3f-a68b-215696e44617" (UID: "aed6b0b2-d265-4f3f-a68b-215696e44617"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.463989 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "aed6b0b2-d265-4f3f-a68b-215696e44617" (UID: "aed6b0b2-d265-4f3f-a68b-215696e44617"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.503958 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw7jq\" (UniqueName: \"kubernetes.io/projected/9e7e2083-472f-4551-840c-35943117bcb0-kube-api-access-xw7jq\") pod \"9e7e2083-472f-4551-840c-35943117bcb0\" (UID: \"9e7e2083-472f-4551-840c-35943117bcb0\") "
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.504437 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-scripts\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.504451 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpvgv\" (UniqueName: \"kubernetes.io/projected/aed6b0b2-d265-4f3f-a68b-215696e44617-kube-api-access-fpvgv\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.504463 4813 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.504471 4813 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.504479 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aed6b0b2-d265-4f3f-a68b-215696e44617-logs\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.504487 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed6b0b2-d265-4f3f-a68b-215696e44617-config-data\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.504494 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed6b0b2-d265-4f3f-a68b-215696e44617-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.507596 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e7e2083-472f-4551-840c-35943117bcb0-kube-api-access-xw7jq" (OuterVolumeSpecName: "kube-api-access-xw7jq") pod "9e7e2083-472f-4551-840c-35943117bcb0" (UID: "9e7e2083-472f-4551-840c-35943117bcb0"). InnerVolumeSpecName "kube-api-access-xw7jq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.549404 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a","Type":"ContainerStarted","Data":"115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658"}
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.549444 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a","Type":"ContainerStarted","Data":"4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43"}
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.549453 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a","Type":"ContainerStarted","Data":"d3af6db5ab8cb0225643b8c136f1df4c6dcd44763e9a0780e0554f2fc088f736"}
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.552779 4813 generic.go:334] "Generic (PLEG): container finished" podID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerID="a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a" exitCode=137
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.552825 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688984b46d-g79nd" event={"ID":"aed6b0b2-d265-4f3f-a68b-215696e44617","Type":"ContainerDied","Data":"a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a"}
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.552845 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688984b46d-g79nd" event={"ID":"aed6b0b2-d265-4f3f-a68b-215696e44617","Type":"ContainerDied","Data":"e23fb43d49e9fc4baa43b937116221cbe797ee6185a606bfc7317b8108f85f00"}
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.552861 4813 scope.go:117] "RemoveContainer" containerID="f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.552979 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-688984b46d-g79nd"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.565452 4813 generic.go:334] "Generic (PLEG): container finished" podID="9e7e2083-472f-4551-840c-35943117bcb0" containerID="49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777" exitCode=2
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.565717 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9e7e2083-472f-4551-840c-35943117bcb0","Type":"ContainerDied","Data":"49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777"}
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.565772 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9e7e2083-472f-4551-840c-35943117bcb0","Type":"ContainerDied","Data":"ca78c531c220401fb7bdfefe1d3a270b79c2931e7c38e178a4d283c308a4c863"}
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.565822 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.604302 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.6042781440000002 podStartE2EDuration="2.604278144s" podCreationTimestamp="2025-10-07 19:37:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:26.587222308 +0000 UTC m=+1172.665477919" watchObservedRunningTime="2025-10-07 19:37:26.604278144 +0000 UTC m=+1172.682533745"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.606211 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw7jq\" (UniqueName: \"kubernetes.io/projected/9e7e2083-472f-4551-840c-35943117bcb0-kube-api-access-xw7jq\") on node \"crc\" DevicePath \"\""
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.612893 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="386bb6a1-9d50-455e-bb53-9979dafceaed" path="/var/lib/kubelet/pods/386bb6a1-9d50-455e-bb53-9979dafceaed/volumes"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.614847 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.628438 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.659193 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-688984b46d-g79nd"]
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.672195 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 07 19:37:26 crc kubenswrapper[4813]: E1007 19:37:26.672873 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.672900 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon"
Oct 07 19:37:26 crc kubenswrapper[4813]: E1007 19:37:26.672925 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon-log"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.672934 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon-log"
Oct 07 19:37:26 crc kubenswrapper[4813]: E1007 19:37:26.672950 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.672957 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon"
Oct 07 19:37:26 crc kubenswrapper[4813]: E1007 19:37:26.673002 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e7e2083-472f-4551-840c-35943117bcb0" containerName="kube-state-metrics"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.673010 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e7e2083-472f-4551-840c-35943117bcb0" containerName="kube-state-metrics"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.673241 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon-log"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.673276 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e7e2083-472f-4551-840c-35943117bcb0" containerName="kube-state-metrics"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.673289 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.673302 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" containerName="horizon"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.674201 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.677420 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.677547 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.680809 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-688984b46d-g79nd"]
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.693656 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.742896 4813 scope.go:117] "RemoveContainer" containerID="a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.797105 4813 scope.go:117] "RemoveContainer" containerID="f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86"
Oct 07 19:37:26 crc kubenswrapper[4813]: E1007 19:37:26.798226 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86\": container with ID starting with f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86 not found: ID does not exist" containerID="f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.798267 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86"} err="failed to get container status \"f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86\": rpc error: code = NotFound desc = could not find container \"f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86\": container with ID starting with f1bd854737a326426b78611ee8f8ff6cc8e3e25919ce0e13d7cf9c200d469b86 not found: ID does not exist"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.798295 4813 scope.go:117] "RemoveContainer" containerID="a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a"
Oct 07 19:37:26 crc kubenswrapper[4813]: E1007 19:37:26.802503 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a\": container with ID starting with a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a not found: ID does not exist" containerID="a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.802555 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a"} err="failed to get container status \"a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a\": rpc error: code = NotFound desc = could not find container \"a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a\": container with ID starting with a5aefa846d511b0d10ce7e3e555be56ba447da1753db6a2450c175297d83d57a not found: ID does not exist"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.802583 4813 scope.go:117] "RemoveContainer" containerID="49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.816838 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8n44\" (UniqueName: \"kubernetes.io/projected/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-api-access-s8n44\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.816898 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.816944 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.817067 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.853082 4813 scope.go:117] "RemoveContainer" containerID="49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777"
Oct 07 19:37:26 crc kubenswrapper[4813]: E1007 19:37:26.854335 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777\": container with ID starting with 49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777 not found: ID does not exist" containerID="49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777"
Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.854391 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777"} err="failed to get container status \"49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777\": rpc error: code = NotFound desc = could not find container \"49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777\": container with ID starting with 49fa9893af1d075fb29447b47d75b9d982b4e2b7ce1b4f572364c4df76747777 not
found: ID does not exist" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.918642 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.918740 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8n44\" (UniqueName: \"kubernetes.io/projected/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-api-access-s8n44\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.918769 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.918803 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.927019 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.930652 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.932009 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba18b055-6c70-4c3c-b464-8138c86bc3ea-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.950199 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8n44\" (UniqueName: \"kubernetes.io/projected/ba18b055-6c70-4c3c-b464-8138c86bc3ea-kube-api-access-s8n44\") pod \"kube-state-metrics-0\" (UID: \"ba18b055-6c70-4c3c-b464-8138c86bc3ea\") " pod="openstack/kube-state-metrics-0" Oct 07 19:37:26 crc kubenswrapper[4813]: I1007 19:37:26.996168 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 07 19:37:27 crc kubenswrapper[4813]: I1007 19:37:27.497448 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 07 19:37:27 crc kubenswrapper[4813]: W1007 19:37:27.504756 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba18b055_6c70_4c3c_b464_8138c86bc3ea.slice/crio-7998c3a5413d62ac9cafc596e46ff1c3814d6c00baf4e7cac4d5dc5a0ebaebc8 WatchSource:0}: Error finding container 7998c3a5413d62ac9cafc596e46ff1c3814d6c00baf4e7cac4d5dc5a0ebaebc8: Status 404 returned error can't find the container with id 7998c3a5413d62ac9cafc596e46ff1c3814d6c00baf4e7cac4d5dc5a0ebaebc8 Oct 07 19:37:27 crc kubenswrapper[4813]: I1007 19:37:27.507483 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 19:37:27 crc kubenswrapper[4813]: I1007 19:37:27.583763 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ba18b055-6c70-4c3c-b464-8138c86bc3ea","Type":"ContainerStarted","Data":"7998c3a5413d62ac9cafc596e46ff1c3814d6c00baf4e7cac4d5dc5a0ebaebc8"} Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.214270 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.215116 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="ceilometer-central-agent" containerID="cri-o://4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2" gracePeriod=30 Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.215466 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="proxy-httpd" containerID="cri-o://bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7" gracePeriod=30 Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.215669 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="sg-core" containerID="cri-o://4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a" gracePeriod=30 Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.215740 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="ceilometer-notification-agent" containerID="cri-o://b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc" gracePeriod=30 Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.595423 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ba18b055-6c70-4c3c-b464-8138c86bc3ea","Type":"ContainerStarted","Data":"7d79f5d0c89b38df1816c29518b38e70457032eccde082916d510b17a5b61896"} Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.595710 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.599570 4813 generic.go:334] "Generic (PLEG): container finished" podID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerID="bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7" exitCode=0 Oct 07 19:37:28 crc kubenswrapper[4813]: 
I1007 19:37:28.599603 4813 generic.go:334] "Generic (PLEG): container finished" podID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerID="4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a" exitCode=2 Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.599624 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerDied","Data":"bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7"} Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.599646 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerDied","Data":"4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a"} Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.615535 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e7e2083-472f-4551-840c-35943117bcb0" path="/var/lib/kubelet/pods/9e7e2083-472f-4551-840c-35943117bcb0/volumes" Oct 07 19:37:28 crc kubenswrapper[4813]: I1007 19:37:28.616234 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aed6b0b2-d265-4f3f-a68b-215696e44617" path="/var/lib/kubelet/pods/aed6b0b2-d265-4f3f-a68b-215696e44617/volumes" Oct 07 19:37:29 crc kubenswrapper[4813]: I1007 19:37:29.616409 4813 generic.go:334] "Generic (PLEG): container finished" podID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerID="4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2" exitCode=0 Oct 07 19:37:29 crc kubenswrapper[4813]: I1007 19:37:29.616496 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerDied","Data":"4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2"} Oct 07 19:37:29 crc kubenswrapper[4813]: I1007 19:37:29.937742 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 19:37:30 crc kubenswrapper[4813]: I1007 19:37:30.881204 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 07 19:37:30 crc kubenswrapper[4813]: I1007 19:37:30.902941 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=4.53608583 podStartE2EDuration="4.902915978s" podCreationTimestamp="2025-10-07 19:37:26 +0000 UTC" firstStartedPulling="2025-10-07 19:37:27.50718832 +0000 UTC m=+1173.585443941" lastFinishedPulling="2025-10-07 19:37:27.874018478 +0000 UTC m=+1173.952274089" observedRunningTime="2025-10-07 19:37:28.610978899 +0000 UTC m=+1174.689234510" watchObservedRunningTime="2025-10-07 19:37:30.902915978 +0000 UTC m=+1176.981171619" Oct 07 19:37:30 crc kubenswrapper[4813]: I1007 19:37:30.951816 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 19:37:30 crc kubenswrapper[4813]: I1007 19:37:30.951861 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 19:37:31 crc kubenswrapper[4813]: I1007 19:37:31.963503 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 19:37:31 
crc kubenswrapper[4813]: I1007 19:37:31.964659 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.248452 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.259315 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ttxn\" (UniqueName: \"kubernetes.io/projected/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-kube-api-access-9ttxn\") pod \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.259422 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-sg-core-conf-yaml\") pod \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.259470 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-scripts\") pod \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.259504 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-combined-ca-bundle\") pod \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.259577 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-config-data\") pod \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.259631 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-run-httpd\") pod \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.259721 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-log-httpd\") pod \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\" (UID: \"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8\") " Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.260477 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" (UID: "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.260613 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" (UID: "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.267618 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-scripts" (OuterVolumeSpecName: "scripts") pod "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" (UID: "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.268966 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-kube-api-access-9ttxn" (OuterVolumeSpecName: "kube-api-access-9ttxn") pod "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" (UID: "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8"). InnerVolumeSpecName "kube-api-access-9ttxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.346291 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" (UID: "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.361537 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ttxn\" (UniqueName: \"kubernetes.io/projected/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-kube-api-access-9ttxn\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.361565 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.361575 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.361583 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.361591 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.388700 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" (UID: "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.420948 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-config-data" (OuterVolumeSpecName: "config-data") pod "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" (UID: "daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.462790 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.462828 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.657722 4813 generic.go:334] "Generic (PLEG): container finished" podID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerID="b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc" exitCode=0 Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.657770 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerDied","Data":"b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc"} Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.657804 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8","Type":"ContainerDied","Data":"d3e0385f15ad671b8278ae1bb891a260fb8b97edd0d300e19275f7718e94e06e"} Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.657813 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.657826 4813 scope.go:117] "RemoveContainer" containerID="bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.679545 4813 scope.go:117] "RemoveContainer" containerID="4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.693286 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.704022 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.717484 4813 scope.go:117] "RemoveContainer" containerID="b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.740717 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:33 crc kubenswrapper[4813]: E1007 19:37:33.741186 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="sg-core" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.741208 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="sg-core" Oct 07 19:37:33 crc kubenswrapper[4813]: E1007 19:37:33.741228 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="ceilometer-central-agent" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.741236 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="ceilometer-central-agent" Oct 07 19:37:33 crc kubenswrapper[4813]: E1007 19:37:33.741261 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="ceilometer-notification-agent" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.741268 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="ceilometer-notification-agent" Oct 07 19:37:33 crc kubenswrapper[4813]: E1007 19:37:33.741296 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="proxy-httpd" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.741302 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="proxy-httpd" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.741530 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="ceilometer-notification-agent" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.741546 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="sg-core" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.741566 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="proxy-httpd" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.741578 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" containerName="ceilometer-central-agent" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.743622 4813 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.748629 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.748809 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.748908 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.753458 4813 scope.go:117] "RemoveContainer" containerID="4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.758294 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.767263 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzjtq\" (UniqueName: \"kubernetes.io/projected/d4a305cf-ba3f-4428-8f49-edc4271c9746-kube-api-access-dzjtq\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.767354 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-config-data\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.767374 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-run-httpd\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.767404 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.767431 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-log-httpd\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.767464 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.767504 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-scripts\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc 
kubenswrapper[4813]: I1007 19:37:33.767526 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.779970 4813 scope.go:117] "RemoveContainer" containerID="bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7" Oct 07 19:37:33 crc kubenswrapper[4813]: E1007 19:37:33.780353 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7\": container with ID starting with bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7 not found: ID does not exist" containerID="bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.780381 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7"} err="failed to get container status \"bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7\": rpc error: code = NotFound desc = could not find container \"bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7\": container with ID starting with bbf71f3834dbef8e3cc0fd4660bbf2b136de7a05972d5d8870d4a2bbd665b6e7 not found: ID does not exist" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.780401 4813 scope.go:117] "RemoveContainer" containerID="4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a" Oct 07 19:37:33 crc kubenswrapper[4813]: E1007 19:37:33.780578 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a\": container with ID starting with 4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a not found: ID does not exist" containerID="4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.780600 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a"} err="failed to get container status \"4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a\": rpc error: code = NotFound desc = could not find container \"4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a\": container with ID starting with 4d6e1269165ea84b2f9eeae2d2f953c6b6fdc91498f27cdc9771c593cda52c8a not found: ID does not exist" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.780613 4813 scope.go:117] "RemoveContainer" containerID="b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc" Oct 07 19:37:33 crc kubenswrapper[4813]: E1007 19:37:33.780778 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc\": container with ID starting with b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc not found: ID does not exist" containerID="b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.780799 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc"} err="failed to get container status \"b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc\": rpc error: code = NotFound desc = could not find container \"b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc\": container with ID starting with b9684e871277cb484908122d63213622b54c1681a2c3b8348b097330462b0ddc not found: ID does not exist" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.780813 4813 scope.go:117] "RemoveContainer" containerID="4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2" Oct 07 19:37:33 crc kubenswrapper[4813]: E1007 19:37:33.781052 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2\": container with ID starting with 4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2 not found: ID does not exist" containerID="4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.781095 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2"} err="failed to get container status \"4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2\": rpc error: code = NotFound desc = could not find container \"4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2\": container with ID starting with 4c5d5a61cc87a1b0505c17aaf364bd893be00266b977a1b04887d83208cb23f2 not found: ID does not exist" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.869018 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.869089 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-log-httpd\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.869145 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.869212 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-scripts\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.869245 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 
crc kubenswrapper[4813]: I1007 19:37:33.869278 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzjtq\" (UniqueName: \"kubernetes.io/projected/d4a305cf-ba3f-4428-8f49-edc4271c9746-kube-api-access-dzjtq\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.869369 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-config-data\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.869392 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-run-httpd\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.869860 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-log-httpd\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.870169 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-run-httpd\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.874027 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.874592 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.877117 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-config-data\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.893562 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-scripts\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.895078 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:33 crc kubenswrapper[4813]: I1007 19:37:33.896067 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzjtq\" (UniqueName: \"kubernetes.io/projected/d4a305cf-ba3f-4428-8f49-edc4271c9746-kube-api-access-dzjtq\") pod \"ceilometer-0\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " pod="openstack/ceilometer-0" Oct 07 19:37:34 crc kubenswrapper[4813]: I1007 19:37:34.063260 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:37:34 crc kubenswrapper[4813]: I1007 19:37:34.616674 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8" path="/var/lib/kubelet/pods/daee7ae3-e196-47d6-a8d4-2e7ecf5ce0b8/volumes" Oct 07 19:37:34 crc kubenswrapper[4813]: I1007 19:37:34.618238 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:34 crc kubenswrapper[4813]: W1007 19:37:34.618747 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4a305cf_ba3f_4428_8f49_edc4271c9746.slice/crio-28965ad5a69b43e3b70c42d5fed8ec56befa79b864e96f65590304c0f43d923f WatchSource:0}: Error finding container 28965ad5a69b43e3b70c42d5fed8ec56befa79b864e96f65590304c0f43d923f: Status 404 returned error can't find the container with id 28965ad5a69b43e3b70c42d5fed8ec56befa79b864e96f65590304c0f43d923f Oct 07 19:37:34 crc kubenswrapper[4813]: I1007 19:37:34.671168 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerStarted","Data":"28965ad5a69b43e3b70c42d5fed8ec56befa79b864e96f65590304c0f43d923f"} Oct 07 19:37:34 crc kubenswrapper[4813]: I1007 19:37:34.937775 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 19:37:34 crc kubenswrapper[4813]: I1007 19:37:34.987939 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 19:37:35 crc kubenswrapper[4813]: I1007 19:37:35.354227 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 19:37:35 crc kubenswrapper[4813]: I1007 19:37:35.354652 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 19:37:35 crc kubenswrapper[4813]: I1007 19:37:35.684189 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerStarted","Data":"2bdedfdf8127e350a42cac1b2596819f1ee848ce71e08ba537d05b11d7844258"} Oct 07 19:37:35 crc kubenswrapper[4813]: I1007 19:37:35.717461 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 19:37:36 crc kubenswrapper[4813]: I1007 19:37:36.438801 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 07 19:37:36 crc kubenswrapper[4813]: I1007 19:37:36.438906 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while 
awaiting headers)" Oct 07 19:37:36 crc kubenswrapper[4813]: I1007 19:37:36.694431 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerStarted","Data":"003e8649ff93ab13a4c7294ca0f96047af6cef83a6e8d1a1fefe7621978f8fec"} Oct 07 19:37:37 crc kubenswrapper[4813]: I1007 19:37:37.089493 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 07 19:37:37 crc kubenswrapper[4813]: I1007 19:37:37.706012 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerStarted","Data":"19f27a7448e92f2dab7c91dcf544a4eceea9fe3c2fcb71b454ede4863550fbfe"} Oct 07 19:37:39 crc kubenswrapper[4813]: I1007 19:37:39.737019 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerStarted","Data":"fb73d5cfa03c8098507fc6ba19fb35beb26643cec00d1e9b9aee3fc1148a1ba8"} Oct 07 19:37:39 crc kubenswrapper[4813]: I1007 19:37:39.737576 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 19:37:39 crc kubenswrapper[4813]: I1007 19:37:39.775862 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.885865773 podStartE2EDuration="6.775842374s" podCreationTimestamp="2025-10-07 19:37:33 +0000 UTC" firstStartedPulling="2025-10-07 19:37:34.622290418 +0000 UTC m=+1180.700546049" lastFinishedPulling="2025-10-07 19:37:38.512267039 +0000 UTC m=+1184.590522650" observedRunningTime="2025-10-07 19:37:39.764484724 +0000 UTC m=+1185.842740375" watchObservedRunningTime="2025-10-07 19:37:39.775842374 +0000 UTC m=+1185.854097985" Oct 07 19:37:40 crc kubenswrapper[4813]: I1007 19:37:40.960521 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 19:37:40 crc kubenswrapper[4813]: I1007 19:37:40.969308 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 19:37:40 crc kubenswrapper[4813]: I1007 19:37:40.972066 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 19:37:41 crc kubenswrapper[4813]: I1007 19:37:41.768742 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.658529 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.769990 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d54pf\" (UniqueName: \"kubernetes.io/projected/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-kube-api-access-d54pf\") pod \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.770431 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-combined-ca-bundle\") pod \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.770542 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-config-data\") pod \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\" (UID: \"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f\") " Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.771603 4813 generic.go:334] "Generic (PLEG): container finished" podID="5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" containerID="d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af" exitCode=137 Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.772111 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f","Type":"ContainerDied","Data":"d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af"} Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.772151 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f","Type":"ContainerDied","Data":"410f95a2eff463bae2ec8c4f3f70d04f7c8b72cbc6840e5aba719313c204304d"} Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.772172 4813 scope.go:117] "RemoveContainer" containerID="d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.772246 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.783246 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-kube-api-access-d54pf" (OuterVolumeSpecName: "kube-api-access-d54pf") pod "5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" (UID: "5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f"). InnerVolumeSpecName "kube-api-access-d54pf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.799438 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-config-data" (OuterVolumeSpecName: "config-data") pod "5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" (UID: "5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.800968 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" (UID: "5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.874474 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.874537 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d54pf\" (UniqueName: \"kubernetes.io/projected/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-kube-api-access-d54pf\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.874551 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.890264 4813 scope.go:117] "RemoveContainer" containerID="d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af" Oct 07 19:37:42 crc kubenswrapper[4813]: E1007 19:37:42.890836 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af\": container with ID starting with d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af not found: ID does not exist" containerID="d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af" Oct 07 19:37:42 crc kubenswrapper[4813]: I1007 19:37:42.890878 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af"} err="failed to get container status \"d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af\": rpc error: code = NotFound desc = could not find container \"d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af\": container with ID starting with d40d79cd6a67d88d20529f90bf876ca5b5e4786c5c0c8e1993033931bd08b2af not found: ID does not exist" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.139860 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.160666 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.178076 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:43 crc kubenswrapper[4813]: E1007 19:37:43.178651 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" containerName="nova-cell1-novncproxy-novncproxy" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.178676 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" containerName="nova-cell1-novncproxy-novncproxy" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.178917 4813 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" containerName="nova-cell1-novncproxy-novncproxy" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.179824 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.189745 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.190002 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.190143 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.208434 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.287032 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.287118 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.287383 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.287586 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wg7q\" (UniqueName: \"kubernetes.io/projected/bed232f8-c7a0-446c-8667-0fb3afda3343-kube-api-access-9wg7q\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.287623 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.389059 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.389521 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.389592 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wg7q\" (UniqueName: \"kubernetes.io/projected/bed232f8-c7a0-446c-8667-0fb3afda3343-kube-api-access-9wg7q\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.389624 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.389701 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.393788 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.394750 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.399479 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.408091 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wg7q\" (UniqueName: \"kubernetes.io/projected/bed232f8-c7a0-446c-8667-0fb3afda3343-kube-api-access-9wg7q\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.409178 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bed232f8-c7a0-446c-8667-0fb3afda3343-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bed232f8-c7a0-446c-8667-0fb3afda3343\") " pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:43 crc kubenswrapper[4813]: I1007 19:37:43.513876 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:44 crc kubenswrapper[4813]: I1007 19:37:44.025929 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 07 19:37:44 crc kubenswrapper[4813]: W1007 19:37:44.033709 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbed232f8_c7a0_446c_8667_0fb3afda3343.slice/crio-2fbe02e6eec3ebbe64999011ca9e287406504f9f0af862c28b98fc1a9c87a7ae WatchSource:0}: Error finding container 2fbe02e6eec3ebbe64999011ca9e287406504f9f0af862c28b98fc1a9c87a7ae: Status 404 returned error can't find the container with id 2fbe02e6eec3ebbe64999011ca9e287406504f9f0af862c28b98fc1a9c87a7ae Oct 07 19:37:44 crc kubenswrapper[4813]: I1007 19:37:44.615391 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f" path="/var/lib/kubelet/pods/5ef4f9bc-9cb6-4a2f-8e58-ca2ad8cb2d2f/volumes" Oct 07 19:37:44 crc kubenswrapper[4813]: I1007 19:37:44.795090 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bed232f8-c7a0-446c-8667-0fb3afda3343","Type":"ContainerStarted","Data":"752daa6e50881544b0a927078967581ce614b3bcd5a2dff6b5eb854cdc56364e"} Oct 07 19:37:44 crc kubenswrapper[4813]: I1007 19:37:44.795567 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bed232f8-c7a0-446c-8667-0fb3afda3343","Type":"ContainerStarted","Data":"2fbe02e6eec3ebbe64999011ca9e287406504f9f0af862c28b98fc1a9c87a7ae"} Oct 07 19:37:44 crc kubenswrapper[4813]: I1007 19:37:44.823201 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.823178272 podStartE2EDuration="1.823178272s" podCreationTimestamp="2025-10-07 19:37:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:44.819829334 +0000 UTC m=+1190.898084955" watchObservedRunningTime="2025-10-07 19:37:44.823178272 +0000 UTC m=+1190.901433883" Oct 07 19:37:45 crc kubenswrapper[4813]: I1007 19:37:45.357943 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 19:37:45 crc kubenswrapper[4813]: I1007 19:37:45.358968 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 19:37:45 crc kubenswrapper[4813]: I1007 19:37:45.365552 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 19:37:45 crc kubenswrapper[4813]: I1007 19:37:45.373154 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 19:37:45 crc kubenswrapper[4813]: I1007 19:37:45.805684 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 19:37:45 crc kubenswrapper[4813]: I1007 19:37:45.809614 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.046723 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mlwlg"] Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.050567 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.089440 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mlwlg"] Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.153182 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.153250 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.153820 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-config\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.153863 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.153887 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.153935 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc2ns\" (UniqueName: \"kubernetes.io/projected/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-kube-api-access-vc2ns\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.256140 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-config\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.256206 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.256233 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.256283 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc2ns\" (UniqueName: \"kubernetes.io/projected/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-kube-api-access-vc2ns\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.256344 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.256379 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.257312 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.257512 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.258808 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.258849 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-config\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.259207 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.274702 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc2ns\" (UniqueName: 
\"kubernetes.io/projected/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-kube-api-access-vc2ns\") pod \"dnsmasq-dns-59cf4bdb65-mlwlg\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.399365 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:46 crc kubenswrapper[4813]: I1007 19:37:46.854836 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mlwlg"] Oct 07 19:37:47 crc kubenswrapper[4813]: I1007 19:37:47.822064 4813 generic.go:334] "Generic (PLEG): container finished" podID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" containerID="784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790" exitCode=0 Oct 07 19:37:47 crc kubenswrapper[4813]: I1007 19:37:47.822163 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" event={"ID":"bd8428c7-dcb9-48eb-977f-61453fbb4dc2","Type":"ContainerDied","Data":"784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790"} Oct 07 19:37:47 crc kubenswrapper[4813]: I1007 19:37:47.822628 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" event={"ID":"bd8428c7-dcb9-48eb-977f-61453fbb4dc2","Type":"ContainerStarted","Data":"c9ae81428039940a46e04b2e17c564994726bd7cfcaaed62233aba7ff7713b48"} Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.256180 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.256563 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="ceilometer-central-agent" containerID="cri-o://2bdedfdf8127e350a42cac1b2596819f1ee848ce71e08ba537d05b11d7844258" gracePeriod=30 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.256680 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="proxy-httpd" containerID="cri-o://fb73d5cfa03c8098507fc6ba19fb35beb26643cec00d1e9b9aee3fc1148a1ba8" gracePeriod=30 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.256742 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="sg-core" containerID="cri-o://19f27a7448e92f2dab7c91dcf544a4eceea9fe3c2fcb71b454ede4863550fbfe" gracePeriod=30 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.256707 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="ceilometer-notification-agent" containerID="cri-o://003e8649ff93ab13a4c7294ca0f96047af6cef83a6e8d1a1fefe7621978f8fec" gracePeriod=30 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.362154 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.198:3000/\": read tcp 10.217.0.2:34628->10.217.0.198:3000: read: connection reset by peer" Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.514664 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:48 
crc kubenswrapper[4813]: I1007 19:37:48.600511 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.832310 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" event={"ID":"bd8428c7-dcb9-48eb-977f-61453fbb4dc2","Type":"ContainerStarted","Data":"672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57"} Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.833929 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.837458 4813 generic.go:334] "Generic (PLEG): container finished" podID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerID="fb73d5cfa03c8098507fc6ba19fb35beb26643cec00d1e9b9aee3fc1148a1ba8" exitCode=0 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.837502 4813 generic.go:334] "Generic (PLEG): container finished" podID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerID="19f27a7448e92f2dab7c91dcf544a4eceea9fe3c2fcb71b454ede4863550fbfe" exitCode=2 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.837511 4813 generic.go:334] "Generic (PLEG): container finished" podID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerID="2bdedfdf8127e350a42cac1b2596819f1ee848ce71e08ba537d05b11d7844258" exitCode=0 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.837559 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerDied","Data":"fb73d5cfa03c8098507fc6ba19fb35beb26643cec00d1e9b9aee3fc1148a1ba8"} Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.837621 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerDied","Data":"19f27a7448e92f2dab7c91dcf544a4eceea9fe3c2fcb71b454ede4863550fbfe"} Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.837637 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerDied","Data":"2bdedfdf8127e350a42cac1b2596819f1ee848ce71e08ba537d05b11d7844258"} Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.837743 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-log" containerID="cri-o://4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43" gracePeriod=30 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.837813 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-api" containerID="cri-o://115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658" gracePeriod=30 Oct 07 19:37:48 crc kubenswrapper[4813]: I1007 19:37:48.857085 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" podStartSLOduration=2.8570654170000003 podStartE2EDuration="2.857065417s" podCreationTimestamp="2025-10-07 19:37:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:48.848788636 +0000 UTC m=+1194.927044247" watchObservedRunningTime="2025-10-07 19:37:48.857065417 +0000 UTC m=+1194.935321028" Oct 07 19:37:50 
crc kubenswrapper[4813]: I1007 19:37:49.847824 4813 generic.go:334] "Generic (PLEG): container finished" podID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerID="003e8649ff93ab13a4c7294ca0f96047af6cef83a6e8d1a1fefe7621978f8fec" exitCode=0 Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:49.847866 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerDied","Data":"003e8649ff93ab13a4c7294ca0f96047af6cef83a6e8d1a1fefe7621978f8fec"} Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:49.850065 4813 generic.go:334] "Generic (PLEG): container finished" podID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerID="4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43" exitCode=143 Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:49.850137 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a","Type":"ContainerDied","Data":"4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43"} Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:49.973660 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.047400 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzjtq\" (UniqueName: \"kubernetes.io/projected/d4a305cf-ba3f-4428-8f49-edc4271c9746-kube-api-access-dzjtq\") pod \"d4a305cf-ba3f-4428-8f49-edc4271c9746\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.047685 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-ceilometer-tls-certs\") pod \"d4a305cf-ba3f-4428-8f49-edc4271c9746\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.047840 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-run-httpd\") pod \"d4a305cf-ba3f-4428-8f49-edc4271c9746\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.047872 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-config-data\") pod \"d4a305cf-ba3f-4428-8f49-edc4271c9746\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.047921 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-combined-ca-bundle\") pod \"d4a305cf-ba3f-4428-8f49-edc4271c9746\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.047962 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-scripts\") pod \"d4a305cf-ba3f-4428-8f49-edc4271c9746\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.047993 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-log-httpd\") pod \"d4a305cf-ba3f-4428-8f49-edc4271c9746\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.048020 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-sg-core-conf-yaml\") pod \"d4a305cf-ba3f-4428-8f49-edc4271c9746\" (UID: \"d4a305cf-ba3f-4428-8f49-edc4271c9746\") " Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.055783 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d4a305cf-ba3f-4428-8f49-edc4271c9746" (UID: "d4a305cf-ba3f-4428-8f49-edc4271c9746"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.056200 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d4a305cf-ba3f-4428-8f49-edc4271c9746" (UID: "d4a305cf-ba3f-4428-8f49-edc4271c9746"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.062917 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4a305cf-ba3f-4428-8f49-edc4271c9746-kube-api-access-dzjtq" (OuterVolumeSpecName: "kube-api-access-dzjtq") pod "d4a305cf-ba3f-4428-8f49-edc4271c9746" (UID: "d4a305cf-ba3f-4428-8f49-edc4271c9746"). InnerVolumeSpecName "kube-api-access-dzjtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.065509 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-scripts" (OuterVolumeSpecName: "scripts") pod "d4a305cf-ba3f-4428-8f49-edc4271c9746" (UID: "d4a305cf-ba3f-4428-8f49-edc4271c9746"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.130563 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d4a305cf-ba3f-4428-8f49-edc4271c9746" (UID: "d4a305cf-ba3f-4428-8f49-edc4271c9746"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.151542 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.151569 4813 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.151581 4813 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.151593 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzjtq\" (UniqueName: \"kubernetes.io/projected/d4a305cf-ba3f-4428-8f49-edc4271c9746-kube-api-access-dzjtq\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.151603 4813 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4a305cf-ba3f-4428-8f49-edc4271c9746-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.159061 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d4a305cf-ba3f-4428-8f49-edc4271c9746" (UID: "d4a305cf-ba3f-4428-8f49-edc4271c9746"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.222482 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4a305cf-ba3f-4428-8f49-edc4271c9746" (UID: "d4a305cf-ba3f-4428-8f49-edc4271c9746"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.236098 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-config-data" (OuterVolumeSpecName: "config-data") pod "d4a305cf-ba3f-4428-8f49-edc4271c9746" (UID: "d4a305cf-ba3f-4428-8f49-edc4271c9746"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.253399 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.253432 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.253445 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4a305cf-ba3f-4428-8f49-edc4271c9746-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.863580 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4a305cf-ba3f-4428-8f49-edc4271c9746","Type":"ContainerDied","Data":"28965ad5a69b43e3b70c42d5fed8ec56befa79b864e96f65590304c0f43d923f"} Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.863682 4813 scope.go:117] "RemoveContainer" containerID="fb73d5cfa03c8098507fc6ba19fb35beb26643cec00d1e9b9aee3fc1148a1ba8" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.863625 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.899662 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.912630 4813 scope.go:117] "RemoveContainer" containerID="19f27a7448e92f2dab7c91dcf544a4eceea9fe3c2fcb71b454ede4863550fbfe" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.926848 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.948821 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:50 crc kubenswrapper[4813]: E1007 19:37:50.949434 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="proxy-httpd" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.949453 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="proxy-httpd" Oct 07 19:37:50 crc kubenswrapper[4813]: E1007 19:37:50.949465 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="ceilometer-central-agent" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.949471 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="ceilometer-central-agent" Oct 07 19:37:50 crc kubenswrapper[4813]: E1007 19:37:50.949510 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="ceilometer-notification-agent" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.949518 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="ceilometer-notification-agent" Oct 07 19:37:50 crc kubenswrapper[4813]: E1007 19:37:50.949538 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" 
containerName="sg-core" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.949543 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="sg-core" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.949794 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="ceilometer-central-agent" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.949879 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="proxy-httpd" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.949889 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="ceilometer-notification-agent" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.949904 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" containerName="sg-core" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.954729 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.956558 4813 scope.go:117] "RemoveContainer" containerID="003e8649ff93ab13a4c7294ca0f96047af6cef83a6e8d1a1fefe7621978f8fec" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.958664 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.961635 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.963553 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 07 19:37:50 crc kubenswrapper[4813]: I1007 19:37:50.965259 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.003125 4813 scope.go:117] "RemoveContainer" containerID="2bdedfdf8127e350a42cac1b2596819f1ee848ce71e08ba537d05b11d7844258" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.068281 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8b4b\" (UniqueName: \"kubernetes.io/projected/e4331b72-d366-4e3d-972d-419bacf0d2f2-kube-api-access-n8b4b\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.068697 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.068732 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.068821 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" 
(UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.068868 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-config-data\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.068888 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-scripts\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.068962 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4331b72-d366-4e3d-972d-419bacf0d2f2-log-httpd\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.069056 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4331b72-d366-4e3d-972d-419bacf0d2f2-run-httpd\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.173778 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8b4b\" (UniqueName: \"kubernetes.io/projected/e4331b72-d366-4e3d-972d-419bacf0d2f2-kube-api-access-n8b4b\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.173850 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.173875 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.173909 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.173930 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-config-data\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.173955 4813 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-scripts\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.173971 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4331b72-d366-4e3d-972d-419bacf0d2f2-log-httpd\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.174006 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4331b72-d366-4e3d-972d-419bacf0d2f2-run-httpd\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.174650 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4331b72-d366-4e3d-972d-419bacf0d2f2-run-httpd\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.175169 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4331b72-d366-4e3d-972d-419bacf0d2f2-log-httpd\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.178993 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-scripts\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.180549 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.180816 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.182960 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.186761 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4331b72-d366-4e3d-972d-419bacf0d2f2-config-data\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.199078 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8b4b\" (UniqueName: 
\"kubernetes.io/projected/e4331b72-d366-4e3d-972d-419bacf0d2f2-kube-api-access-n8b4b\") pod \"ceilometer-0\" (UID: \"e4331b72-d366-4e3d-972d-419bacf0d2f2\") " pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.280280 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.772262 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 07 19:37:51 crc kubenswrapper[4813]: I1007 19:37:51.874989 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4331b72-d366-4e3d-972d-419bacf0d2f2","Type":"ContainerStarted","Data":"5c4a6bd7b07d8a4d789b5be17a9866fdd9d9ff357a9814b34f5feff8919e556e"} Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.375525 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.511908 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-combined-ca-bundle\") pod \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.512061 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-logs\") pod \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.512103 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7wcx\" (UniqueName: \"kubernetes.io/projected/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-kube-api-access-w7wcx\") pod \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.512148 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-config-data\") pod \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\" (UID: \"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a\") " Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.513575 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-logs" (OuterVolumeSpecName: "logs") pod "0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" (UID: "0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.517308 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-kube-api-access-w7wcx" (OuterVolumeSpecName: "kube-api-access-w7wcx") pod "0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" (UID: "0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a"). InnerVolumeSpecName "kube-api-access-w7wcx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.551845 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" (UID: "0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.569478 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-config-data" (OuterVolumeSpecName: "config-data") pod "0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" (UID: "0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.613622 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4a305cf-ba3f-4428-8f49-edc4271c9746" path="/var/lib/kubelet/pods/d4a305cf-ba3f-4428-8f49-edc4271c9746/volumes" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.616032 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.616113 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7wcx\" (UniqueName: \"kubernetes.io/projected/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-kube-api-access-w7wcx\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.616191 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.616247 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.883557 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4331b72-d366-4e3d-972d-419bacf0d2f2","Type":"ContainerStarted","Data":"8d1b58f3c385c8bf5f22bfd4993546c53c1823272aa495a3b5bed3d800713785"} Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.886536 4813 generic.go:334] "Generic (PLEG): container finished" podID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerID="115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658" exitCode=0 Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.886570 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a","Type":"ContainerDied","Data":"115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658"} Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.886590 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a","Type":"ContainerDied","Data":"d3af6db5ab8cb0225643b8c136f1df4c6dcd44763e9a0780e0554f2fc088f736"} Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.886608 4813 scope.go:117] "RemoveContainer" 
containerID="115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.886710 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.915412 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.923399 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.927678 4813 scope.go:117] "RemoveContainer" containerID="4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.963774 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:52 crc kubenswrapper[4813]: E1007 19:37:52.965163 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-api" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.966466 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-api" Oct 07 19:37:52 crc kubenswrapper[4813]: E1007 19:37:52.966521 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-log" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.966528 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-log" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.967058 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-log" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.967097 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" containerName="nova-api-api" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.973426 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.976423 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.986493 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.989296 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.989505 4813 scope.go:117] "RemoveContainer" containerID="115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658" Oct 07 19:37:52 crc kubenswrapper[4813]: E1007 19:37:52.989979 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658\": container with ID starting with 115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658 not found: ID does not exist" containerID="115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.990008 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658"} err="failed to get container status \"115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658\": rpc error: code = NotFound desc = could not find container \"115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658\": container with ID starting with 115682a81ac9c76144adda666de8d785bc56b7b09b5f795951938e27bde89658 not found: ID does not exist" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.990033 4813 scope.go:117] "RemoveContainer" containerID="4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43" Oct 07 19:37:52 crc kubenswrapper[4813]: E1007 19:37:52.990431 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43\": container with ID starting with 4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43 not found: ID does not exist" containerID="4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.990459 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43"} err="failed to get container status \"4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43\": rpc error: code = NotFound desc = could not find container \"4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43\": container with ID starting with 4a77ec1d1617d6d93b442d2400e0a7811f181cfb7349ef5f5c02e14986ad0f43 not found: ID does not exist" Oct 07 19:37:52 crc kubenswrapper[4813]: I1007 19:37:52.993648 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.029603 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-config-data\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc 
kubenswrapper[4813]: I1007 19:37:53.029726 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-public-tls-certs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.029762 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7n56\" (UniqueName: \"kubernetes.io/projected/352fd190-a3ed-4279-97d3-e4759c29930d-kube-api-access-q7n56\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.029788 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.029835 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352fd190-a3ed-4279-97d3-e4759c29930d-logs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.029857 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.134527 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7n56\" (UniqueName: \"kubernetes.io/projected/352fd190-a3ed-4279-97d3-e4759c29930d-kube-api-access-q7n56\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.135402 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.135436 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352fd190-a3ed-4279-97d3-e4759c29930d-logs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.135473 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.135576 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-config-data\") pod \"nova-api-0\" (UID: 
\"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.135701 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-public-tls-certs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.141665 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352fd190-a3ed-4279-97d3-e4759c29930d-logs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.142159 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.148301 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.148940 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-config-data\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.153477 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-public-tls-certs\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.160122 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7n56\" (UniqueName: \"kubernetes.io/projected/352fd190-a3ed-4279-97d3-e4759c29930d-kube-api-access-q7n56\") pod \"nova-api-0\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.322990 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.516660 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.545167 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.817971 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:37:53 crc kubenswrapper[4813]: W1007 19:37:53.879582 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod352fd190_a3ed_4279_97d3_e4759c29930d.slice/crio-c2ac42219f20c6c4f292fd0a0c0cb95df5d8b733b3b5abc3d93ef14196720e07 WatchSource:0}: Error finding container c2ac42219f20c6c4f292fd0a0c0cb95df5d8b733b3b5abc3d93ef14196720e07: Status 404 returned error can't find the container with id c2ac42219f20c6c4f292fd0a0c0cb95df5d8b733b3b5abc3d93ef14196720e07 Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.905556 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"352fd190-a3ed-4279-97d3-e4759c29930d","Type":"ContainerStarted","Data":"c2ac42219f20c6c4f292fd0a0c0cb95df5d8b733b3b5abc3d93ef14196720e07"} Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.906830 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4331b72-d366-4e3d-972d-419bacf0d2f2","Type":"ContainerStarted","Data":"11818ec6b626ec05edb4bd3be51f5b6763d042a20da6b153a43c4fc41b8e72f2"} Oct 07 19:37:53 crc kubenswrapper[4813]: I1007 19:37:53.931735 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.114967 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-p98qg"] Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.116253 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.124701 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.124973 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.128525 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-p98qg"] Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.165100 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp7fw\" (UniqueName: \"kubernetes.io/projected/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-kube-api-access-rp7fw\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.165237 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-config-data\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.165269 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-scripts\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.165443 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.267286 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-config-data\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.267356 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-scripts\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.267406 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.267508 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp7fw\" (UniqueName: 
\"kubernetes.io/projected/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-kube-api-access-rp7fw\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.273219 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-scripts\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.279989 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-config-data\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.280577 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.287909 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp7fw\" (UniqueName: \"kubernetes.io/projected/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-kube-api-access-rp7fw\") pod \"nova-cell1-cell-mapping-p98qg\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.448859 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.632109 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a" path="/var/lib/kubelet/pods/0f8b9e92-4a4f-4a9a-a743-7fd3aa1ebd6a/volumes" Oct 07 19:37:54 crc kubenswrapper[4813]: I1007 19:37:54.922077 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"352fd190-a3ed-4279-97d3-e4759c29930d","Type":"ContainerStarted","Data":"2252c3690ed0a39186e1b4b3f79dcb917d49b201001e609e98c5cd49d408930d"} Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:54.924282 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"352fd190-a3ed-4279-97d3-e4759c29930d","Type":"ContainerStarted","Data":"5b9eb4ac22013152f826d9ed37a944e317adb615469e5ec585b4855bbdff05fb"} Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:54.930383 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4331b72-d366-4e3d-972d-419bacf0d2f2","Type":"ContainerStarted","Data":"d8991d76a9c71064651e5c6b81b690e05ea06572c87216cfd7f25e54bee721ae"} Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:54.961969 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.961945908 podStartE2EDuration="2.961945908s" podCreationTimestamp="2025-10-07 19:37:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:54.958768755 +0000 UTC m=+1201.037024386" watchObservedRunningTime="2025-10-07 19:37:54.961945908 +0000 UTC m=+1201.040201519" Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:55.002104 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-p98qg"] Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:55.941652 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4331b72-d366-4e3d-972d-419bacf0d2f2","Type":"ContainerStarted","Data":"5c72b04ac8d160206c42e174aec03672590a1222c8909b022c208f93390450d8"} Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:55.942084 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:55.947481 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-p98qg" event={"ID":"4e7b45b6-c0c5-43d8-be53-5278bf7fde77","Type":"ContainerStarted","Data":"394f16cc57875dc8c3e48f15387096bfd62e91ea4af9fff21389f55e93e0e8b6"} Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:55.947567 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-p98qg" event={"ID":"4e7b45b6-c0c5-43d8-be53-5278bf7fde77","Type":"ContainerStarted","Data":"20940b302346773af9079e1be7e659e379c80991cea7b20dd063b85e74814d65"} Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:55.987462 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.113910506 podStartE2EDuration="5.987446659s" podCreationTimestamp="2025-10-07 19:37:50 +0000 UTC" firstStartedPulling="2025-10-07 19:37:51.770888632 +0000 UTC m=+1197.849144253" lastFinishedPulling="2025-10-07 19:37:55.644424785 +0000 UTC m=+1201.722680406" observedRunningTime="2025-10-07 19:37:55.973753771 +0000 UTC m=+1202.052009372" 
watchObservedRunningTime="2025-10-07 19:37:55.987446659 +0000 UTC m=+1202.065702270" Oct 07 19:37:55 crc kubenswrapper[4813]: I1007 19:37:55.994836 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-p98qg" podStartSLOduration=1.994819273 podStartE2EDuration="1.994819273s" podCreationTimestamp="2025-10-07 19:37:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:37:55.99334309 +0000 UTC m=+1202.071598701" watchObservedRunningTime="2025-10-07 19:37:55.994819273 +0000 UTC m=+1202.073074884" Oct 07 19:37:56 crc kubenswrapper[4813]: I1007 19:37:56.402532 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:37:56 crc kubenswrapper[4813]: I1007 19:37:56.496370 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-rjg4f"] Oct 07 19:37:56 crc kubenswrapper[4813]: I1007 19:37:56.496591 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" podUID="4b3b876a-cd89-4a2c-8179-74c000809b17" containerName="dnsmasq-dns" containerID="cri-o://8c53e55169f997bee4439bd5317a7b260c203ec4067146e865fdd672e844ec96" gracePeriod=10 Oct 07 19:37:56 crc kubenswrapper[4813]: I1007 19:37:56.964892 4813 generic.go:334] "Generic (PLEG): container finished" podID="4b3b876a-cd89-4a2c-8179-74c000809b17" containerID="8c53e55169f997bee4439bd5317a7b260c203ec4067146e865fdd672e844ec96" exitCode=0 Oct 07 19:37:56 crc kubenswrapper[4813]: I1007 19:37:56.965165 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" event={"ID":"4b3b876a-cd89-4a2c-8179-74c000809b17","Type":"ContainerDied","Data":"8c53e55169f997bee4439bd5317a7b260c203ec4067146e865fdd672e844ec96"} Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.089293 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.190037 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-config\") pod \"4b3b876a-cd89-4a2c-8179-74c000809b17\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.190096 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-swift-storage-0\") pod \"4b3b876a-cd89-4a2c-8179-74c000809b17\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.190210 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-sb\") pod \"4b3b876a-cd89-4a2c-8179-74c000809b17\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.190246 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-nb\") pod \"4b3b876a-cd89-4a2c-8179-74c000809b17\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.190430 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7jq7\" (UniqueName: \"kubernetes.io/projected/4b3b876a-cd89-4a2c-8179-74c000809b17-kube-api-access-p7jq7\") pod \"4b3b876a-cd89-4a2c-8179-74c000809b17\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.190496 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-svc\") pod \"4b3b876a-cd89-4a2c-8179-74c000809b17\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.217576 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b3b876a-cd89-4a2c-8179-74c000809b17-kube-api-access-p7jq7" (OuterVolumeSpecName: "kube-api-access-p7jq7") pod "4b3b876a-cd89-4a2c-8179-74c000809b17" (UID: "4b3b876a-cd89-4a2c-8179-74c000809b17"). InnerVolumeSpecName "kube-api-access-p7jq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.236169 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4b3b876a-cd89-4a2c-8179-74c000809b17" (UID: "4b3b876a-cd89-4a2c-8179-74c000809b17"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.265841 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-config" (OuterVolumeSpecName: "config") pod "4b3b876a-cd89-4a2c-8179-74c000809b17" (UID: "4b3b876a-cd89-4a2c-8179-74c000809b17"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.276302 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4b3b876a-cd89-4a2c-8179-74c000809b17" (UID: "4b3b876a-cd89-4a2c-8179-74c000809b17"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.290965 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4b3b876a-cd89-4a2c-8179-74c000809b17" (UID: "4b3b876a-cd89-4a2c-8179-74c000809b17"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.291232 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4b3b876a-cd89-4a2c-8179-74c000809b17" (UID: "4b3b876a-cd89-4a2c-8179-74c000809b17"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.291850 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-svc\") pod \"4b3b876a-cd89-4a2c-8179-74c000809b17\" (UID: \"4b3b876a-cd89-4a2c-8179-74c000809b17\") " Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.292277 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7jq7\" (UniqueName: \"kubernetes.io/projected/4b3b876a-cd89-4a2c-8179-74c000809b17-kube-api-access-p7jq7\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.292292 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.292301 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.292309 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:57 crc kubenswrapper[4813]: W1007 19:37:57.292340 4813 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/4b3b876a-cd89-4a2c-8179-74c000809b17/volumes/kubernetes.io~configmap/dns-svc Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.292369 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.292366 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-svc" (OuterVolumeSpecName: "dns-svc") pod 
"4b3b876a-cd89-4a2c-8179-74c000809b17" (UID: "4b3b876a-cd89-4a2c-8179-74c000809b17"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.394722 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b3b876a-cd89-4a2c-8179-74c000809b17-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.973848 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" event={"ID":"4b3b876a-cd89-4a2c-8179-74c000809b17","Type":"ContainerDied","Data":"635aa09041cb2a05a8227eca9d077ef4ce7bb9be7dfca998f5ba24838f6250c4"} Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.974594 4813 scope.go:117] "RemoveContainer" containerID="8c53e55169f997bee4439bd5317a7b260c203ec4067146e865fdd672e844ec96" Oct 07 19:37:57 crc kubenswrapper[4813]: I1007 19:37:57.974795 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-rjg4f" Oct 07 19:37:58 crc kubenswrapper[4813]: I1007 19:37:58.013695 4813 scope.go:117] "RemoveContainer" containerID="7beb608e6dd5b9709919370e458097c003df8852d56ed0182b99e4e14bc81a42" Oct 07 19:37:58 crc kubenswrapper[4813]: I1007 19:37:58.048810 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-rjg4f"] Oct 07 19:37:58 crc kubenswrapper[4813]: I1007 19:37:58.048946 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-rjg4f"] Oct 07 19:37:58 crc kubenswrapper[4813]: I1007 19:37:58.626034 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b3b876a-cd89-4a2c-8179-74c000809b17" path="/var/lib/kubelet/pods/4b3b876a-cd89-4a2c-8179-74c000809b17/volumes" Oct 07 19:38:01 crc kubenswrapper[4813]: I1007 19:38:01.018784 4813 generic.go:334] "Generic (PLEG): container finished" podID="4e7b45b6-c0c5-43d8-be53-5278bf7fde77" containerID="394f16cc57875dc8c3e48f15387096bfd62e91ea4af9fff21389f55e93e0e8b6" exitCode=0 Oct 07 19:38:01 crc kubenswrapper[4813]: I1007 19:38:01.019116 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-p98qg" event={"ID":"4e7b45b6-c0c5-43d8-be53-5278bf7fde77","Type":"ContainerDied","Data":"394f16cc57875dc8c3e48f15387096bfd62e91ea4af9fff21389f55e93e0e8b6"} Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.497777 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.618565 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp7fw\" (UniqueName: \"kubernetes.io/projected/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-kube-api-access-rp7fw\") pod \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.618677 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-scripts\") pod \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.618864 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-combined-ca-bundle\") pod \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.618976 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-config-data\") pod \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\" (UID: \"4e7b45b6-c0c5-43d8-be53-5278bf7fde77\") " Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.627744 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-scripts" (OuterVolumeSpecName: "scripts") pod "4e7b45b6-c0c5-43d8-be53-5278bf7fde77" (UID: "4e7b45b6-c0c5-43d8-be53-5278bf7fde77"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.629204 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-kube-api-access-rp7fw" (OuterVolumeSpecName: "kube-api-access-rp7fw") pod "4e7b45b6-c0c5-43d8-be53-5278bf7fde77" (UID: "4e7b45b6-c0c5-43d8-be53-5278bf7fde77"). InnerVolumeSpecName "kube-api-access-rp7fw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.671474 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-config-data" (OuterVolumeSpecName: "config-data") pod "4e7b45b6-c0c5-43d8-be53-5278bf7fde77" (UID: "4e7b45b6-c0c5-43d8-be53-5278bf7fde77"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.671961 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e7b45b6-c0c5-43d8-be53-5278bf7fde77" (UID: "4e7b45b6-c0c5-43d8-be53-5278bf7fde77"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.722073 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.722120 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.722137 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp7fw\" (UniqueName: \"kubernetes.io/projected/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-kube-api-access-rp7fw\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:02 crc kubenswrapper[4813]: I1007 19:38:02.722153 4813 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e7b45b6-c0c5-43d8-be53-5278bf7fde77-scripts\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.053690 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-p98qg" event={"ID":"4e7b45b6-c0c5-43d8-be53-5278bf7fde77","Type":"ContainerDied","Data":"20940b302346773af9079e1be7e659e379c80991cea7b20dd063b85e74814d65"} Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.053769 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20940b302346773af9079e1be7e659e379c80991cea7b20dd063b85e74814d65" Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.053786 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-p98qg" Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.259109 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.259422 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" containerName="nova-api-log" containerID="cri-o://5b9eb4ac22013152f826d9ed37a944e317adb615469e5ec585b4855bbdff05fb" gracePeriod=30 Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.259475 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" containerName="nova-api-api" containerID="cri-o://2252c3690ed0a39186e1b4b3f79dcb917d49b201001e609e98c5cd49d408930d" gracePeriod=30 Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.280938 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.281422 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8b192b8d-45cf-450f-a67d-a6f9b1a8326a" containerName="nova-scheduler-scheduler" containerID="cri-o://f186e0ec0ea8e5bcaa7770645837e872ace89680da3043803ae992e80d70793f" gracePeriod=30 Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.295981 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.296403 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" 
containerName="nova-metadata-log" containerID="cri-o://721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327" gracePeriod=30 Oct 07 19:38:03 crc kubenswrapper[4813]: I1007 19:38:03.296780 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-metadata" containerID="cri-o://3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e" gracePeriod=30 Oct 07 19:38:03 crc kubenswrapper[4813]: E1007 19:38:03.400740 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod352fd190_a3ed_4279_97d3_e4759c29930d.slice/crio-conmon-5b9eb4ac22013152f826d9ed37a944e317adb615469e5ec585b4855bbdff05fb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21847ab9_4438_4203_ba7e_de3231e5e3ba.slice/crio-conmon-721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21847ab9_4438_4203_ba7e_de3231e5e3ba.slice/crio-721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod352fd190_a3ed_4279_97d3_e4759c29930d.slice/crio-5b9eb4ac22013152f826d9ed37a944e317adb615469e5ec585b4855bbdff05fb.scope\": RecentStats: unable to find data in memory cache]" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.074633 4813 generic.go:334] "Generic (PLEG): container finished" podID="352fd190-a3ed-4279-97d3-e4759c29930d" containerID="2252c3690ed0a39186e1b4b3f79dcb917d49b201001e609e98c5cd49d408930d" exitCode=0 Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.074939 4813 generic.go:334] "Generic (PLEG): container finished" podID="352fd190-a3ed-4279-97d3-e4759c29930d" containerID="5b9eb4ac22013152f826d9ed37a944e317adb615469e5ec585b4855bbdff05fb" exitCode=143 Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.075032 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"352fd190-a3ed-4279-97d3-e4759c29930d","Type":"ContainerDied","Data":"2252c3690ed0a39186e1b4b3f79dcb917d49b201001e609e98c5cd49d408930d"} Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.075063 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"352fd190-a3ed-4279-97d3-e4759c29930d","Type":"ContainerDied","Data":"5b9eb4ac22013152f826d9ed37a944e317adb615469e5ec585b4855bbdff05fb"} Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.090334 4813 generic.go:334] "Generic (PLEG): container finished" podID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerID="721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327" exitCode=143 Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.090501 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21847ab9-4438-4203-ba7e-de3231e5e3ba","Type":"ContainerDied","Data":"721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327"} Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.097636 4813 generic.go:334] "Generic (PLEG): container finished" podID="8b192b8d-45cf-450f-a67d-a6f9b1a8326a" containerID="f186e0ec0ea8e5bcaa7770645837e872ace89680da3043803ae992e80d70793f" 
exitCode=0 Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.097673 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b192b8d-45cf-450f-a67d-a6f9b1a8326a","Type":"ContainerDied","Data":"f186e0ec0ea8e5bcaa7770645837e872ace89680da3043803ae992e80d70793f"} Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.173454 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.327528 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.363965 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-internal-tls-certs\") pod \"352fd190-a3ed-4279-97d3-e4759c29930d\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.364157 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-public-tls-certs\") pod \"352fd190-a3ed-4279-97d3-e4759c29930d\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.364204 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352fd190-a3ed-4279-97d3-e4759c29930d-logs\") pod \"352fd190-a3ed-4279-97d3-e4759c29930d\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.364235 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-config-data\") pod \"352fd190-a3ed-4279-97d3-e4759c29930d\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.364274 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-combined-ca-bundle\") pod \"352fd190-a3ed-4279-97d3-e4759c29930d\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.364345 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7n56\" (UniqueName: \"kubernetes.io/projected/352fd190-a3ed-4279-97d3-e4759c29930d-kube-api-access-q7n56\") pod \"352fd190-a3ed-4279-97d3-e4759c29930d\" (UID: \"352fd190-a3ed-4279-97d3-e4759c29930d\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.365149 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/352fd190-a3ed-4279-97d3-e4759c29930d-logs" (OuterVolumeSpecName: "logs") pod "352fd190-a3ed-4279-97d3-e4759c29930d" (UID: "352fd190-a3ed-4279-97d3-e4759c29930d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.374776 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/352fd190-a3ed-4279-97d3-e4759c29930d-kube-api-access-q7n56" (OuterVolumeSpecName: "kube-api-access-q7n56") pod "352fd190-a3ed-4279-97d3-e4759c29930d" (UID: "352fd190-a3ed-4279-97d3-e4759c29930d"). 
InnerVolumeSpecName "kube-api-access-q7n56". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.392543 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-config-data" (OuterVolumeSpecName: "config-data") pod "352fd190-a3ed-4279-97d3-e4759c29930d" (UID: "352fd190-a3ed-4279-97d3-e4759c29930d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.409512 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "352fd190-a3ed-4279-97d3-e4759c29930d" (UID: "352fd190-a3ed-4279-97d3-e4759c29930d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.418259 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "352fd190-a3ed-4279-97d3-e4759c29930d" (UID: "352fd190-a3ed-4279-97d3-e4759c29930d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.418477 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "352fd190-a3ed-4279-97d3-e4759c29930d" (UID: "352fd190-a3ed-4279-97d3-e4759c29930d"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.466023 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-config-data\") pod \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.466097 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-combined-ca-bundle\") pod \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.466534 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb8mr\" (UniqueName: \"kubernetes.io/projected/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-kube-api-access-gb8mr\") pod \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\" (UID: \"8b192b8d-45cf-450f-a67d-a6f9b1a8326a\") " Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.466963 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/352fd190-a3ed-4279-97d3-e4759c29930d-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.466980 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.466988 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.466999 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7n56\" (UniqueName: \"kubernetes.io/projected/352fd190-a3ed-4279-97d3-e4759c29930d-kube-api-access-q7n56\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.467007 4813 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.467035 4813 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/352fd190-a3ed-4279-97d3-e4759c29930d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.470432 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-kube-api-access-gb8mr" (OuterVolumeSpecName: "kube-api-access-gb8mr") pod "8b192b8d-45cf-450f-a67d-a6f9b1a8326a" (UID: "8b192b8d-45cf-450f-a67d-a6f9b1a8326a"). InnerVolumeSpecName "kube-api-access-gb8mr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.491673 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b192b8d-45cf-450f-a67d-a6f9b1a8326a" (UID: "8b192b8d-45cf-450f-a67d-a6f9b1a8326a"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.494361 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-config-data" (OuterVolumeSpecName: "config-data") pod "8b192b8d-45cf-450f-a67d-a6f9b1a8326a" (UID: "8b192b8d-45cf-450f-a67d-a6f9b1a8326a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.570418 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb8mr\" (UniqueName: \"kubernetes.io/projected/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-kube-api-access-gb8mr\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.570624 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:04 crc kubenswrapper[4813]: I1007 19:38:04.570664 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b192b8d-45cf-450f-a67d-a6f9b1a8326a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.113927 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"352fd190-a3ed-4279-97d3-e4759c29930d","Type":"ContainerDied","Data":"c2ac42219f20c6c4f292fd0a0c0cb95df5d8b733b3b5abc3d93ef14196720e07"} Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.114392 4813 scope.go:117] "RemoveContainer" containerID="2252c3690ed0a39186e1b4b3f79dcb917d49b201001e609e98c5cd49d408930d" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.114625 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.121696 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b192b8d-45cf-450f-a67d-a6f9b1a8326a","Type":"ContainerDied","Data":"588682a67ff6eaa53211c684530bbc264cd37a57d0b66e19daa76e54ee502331"} Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.121796 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.162502 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.178868 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.191488 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.211381 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.215230 4813 scope.go:117] "RemoveContainer" containerID="5b9eb4ac22013152f826d9ed37a944e317adb615469e5ec585b4855bbdff05fb" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.218669 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 07 19:38:05 crc kubenswrapper[4813]: E1007 19:38:05.219048 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7b45b6-c0c5-43d8-be53-5278bf7fde77" containerName="nova-manage" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219065 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7b45b6-c0c5-43d8-be53-5278bf7fde77" containerName="nova-manage" Oct 07 19:38:05 crc kubenswrapper[4813]: E1007 19:38:05.219081 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b192b8d-45cf-450f-a67d-a6f9b1a8326a" containerName="nova-scheduler-scheduler" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219088 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b192b8d-45cf-450f-a67d-a6f9b1a8326a" containerName="nova-scheduler-scheduler" Oct 07 19:38:05 crc kubenswrapper[4813]: E1007 19:38:05.219100 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" containerName="nova-api-api" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219106 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" containerName="nova-api-api" Oct 07 19:38:05 crc kubenswrapper[4813]: E1007 19:38:05.219120 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b3b876a-cd89-4a2c-8179-74c000809b17" containerName="init" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219126 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b3b876a-cd89-4a2c-8179-74c000809b17" containerName="init" Oct 07 19:38:05 crc kubenswrapper[4813]: E1007 19:38:05.219149 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b3b876a-cd89-4a2c-8179-74c000809b17" containerName="dnsmasq-dns" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219166 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b3b876a-cd89-4a2c-8179-74c000809b17" containerName="dnsmasq-dns" Oct 07 19:38:05 crc kubenswrapper[4813]: E1007 19:38:05.219180 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" containerName="nova-api-log" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219187 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" containerName="nova-api-log" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219378 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7b45b6-c0c5-43d8-be53-5278bf7fde77" containerName="nova-manage" Oct 07 19:38:05 crc 
kubenswrapper[4813]: I1007 19:38:05.219389 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b192b8d-45cf-450f-a67d-a6f9b1a8326a" containerName="nova-scheduler-scheduler" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219400 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" containerName="nova-api-api" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219437 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" containerName="nova-api-log" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.219450 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b3b876a-cd89-4a2c-8179-74c000809b17" containerName="dnsmasq-dns" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.220551 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.223689 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.223926 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.224176 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.228165 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.242030 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.243732 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.249867 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.273148 4813 scope.go:117] "RemoveContainer" containerID="f186e0ec0ea8e5bcaa7770645837e872ace89680da3043803ae992e80d70793f" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.274924 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.387821 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt5pb\" (UniqueName: \"kubernetes.io/projected/efa771aa-3427-4b7e-b8a8-775222785447-kube-api-access-qt5pb\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.387894 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.387941 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-internal-tls-certs\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.387984 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-public-tls-certs\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.388026 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh6h5\" (UniqueName: \"kubernetes.io/projected/e032c1c7-6f6c-4265-9320-0500b815ec64-kube-api-access-vh6h5\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.388109 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e032c1c7-6f6c-4265-9320-0500b815ec64-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.388180 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/efa771aa-3427-4b7e-b8a8-775222785447-logs\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.388249 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e032c1c7-6f6c-4265-9320-0500b815ec64-config-data\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " 
pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.388348 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-config-data\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.489705 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt5pb\" (UniqueName: \"kubernetes.io/projected/efa771aa-3427-4b7e-b8a8-775222785447-kube-api-access-qt5pb\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.489765 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.489797 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-internal-tls-certs\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.489828 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-public-tls-certs\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.489858 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh6h5\" (UniqueName: \"kubernetes.io/projected/e032c1c7-6f6c-4265-9320-0500b815ec64-kube-api-access-vh6h5\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.489920 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e032c1c7-6f6c-4265-9320-0500b815ec64-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.489968 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/efa771aa-3427-4b7e-b8a8-775222785447-logs\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.490044 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e032c1c7-6f6c-4265-9320-0500b815ec64-config-data\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.490518 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/efa771aa-3427-4b7e-b8a8-775222785447-logs\") pod \"nova-api-0\" (UID: 
\"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.490690 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-config-data\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.495771 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e032c1c7-6f6c-4265-9320-0500b815ec64-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.496459 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e032c1c7-6f6c-4265-9320-0500b815ec64-config-data\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.496788 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-config-data\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.497741 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.500145 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-public-tls-certs\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.505762 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/efa771aa-3427-4b7e-b8a8-775222785447-internal-tls-certs\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.508044 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh6h5\" (UniqueName: \"kubernetes.io/projected/e032c1c7-6f6c-4265-9320-0500b815ec64-kube-api-access-vh6h5\") pod \"nova-scheduler-0\" (UID: \"e032c1c7-6f6c-4265-9320-0500b815ec64\") " pod="openstack/nova-scheduler-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.512891 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt5pb\" (UniqueName: \"kubernetes.io/projected/efa771aa-3427-4b7e-b8a8-775222785447-kube-api-access-qt5pb\") pod \"nova-api-0\" (UID: \"efa771aa-3427-4b7e-b8a8-775222785447\") " pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.542225 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 07 19:38:05 crc kubenswrapper[4813]: I1007 19:38:05.566125 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.023586 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.097901 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 07 19:38:06 crc kubenswrapper[4813]: W1007 19:38:06.103721 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podefa771aa_3427_4b7e_b8a8_775222785447.slice/crio-8dc6b00b6779a60b6a00e77d87e6ea0cba3ad1bf3667783ae8a1aea1b599e3ac WatchSource:0}: Error finding container 8dc6b00b6779a60b6a00e77d87e6ea0cba3ad1bf3667783ae8a1aea1b599e3ac: Status 404 returned error can't find the container with id 8dc6b00b6779a60b6a00e77d87e6ea0cba3ad1bf3667783ae8a1aea1b599e3ac Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.151139 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"efa771aa-3427-4b7e-b8a8-775222785447","Type":"ContainerStarted","Data":"8dc6b00b6779a60b6a00e77d87e6ea0cba3ad1bf3667783ae8a1aea1b599e3ac"} Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.155547 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e032c1c7-6f6c-4265-9320-0500b815ec64","Type":"ContainerStarted","Data":"d546ff7673c8c76adbb540b8c24a2fe4edba7c2661ef2a6e326b6ee215e1a27f"} Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.448963 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": read tcp 10.217.0.2:33224->10.217.0.194:8775: read: connection reset by peer" Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.449022 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": read tcp 10.217.0.2:33230->10.217.0.194:8775: read: connection reset by peer" Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.613398 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="352fd190-a3ed-4279-97d3-e4759c29930d" path="/var/lib/kubelet/pods/352fd190-a3ed-4279-97d3-e4759c29930d/volumes" Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.613947 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b192b8d-45cf-450f-a67d-a6f9b1a8326a" path="/var/lib/kubelet/pods/8b192b8d-45cf-450f-a67d-a6f9b1a8326a/volumes" Oct 07 19:38:06 crc kubenswrapper[4813]: I1007 19:38:06.879721 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.025940 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-combined-ca-bundle\") pod \"21847ab9-4438-4203-ba7e-de3231e5e3ba\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.026021 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57wk5\" (UniqueName: \"kubernetes.io/projected/21847ab9-4438-4203-ba7e-de3231e5e3ba-kube-api-access-57wk5\") pod \"21847ab9-4438-4203-ba7e-de3231e5e3ba\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.026054 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-nova-metadata-tls-certs\") pod \"21847ab9-4438-4203-ba7e-de3231e5e3ba\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.026174 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21847ab9-4438-4203-ba7e-de3231e5e3ba-logs\") pod \"21847ab9-4438-4203-ba7e-de3231e5e3ba\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.026200 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-config-data\") pod \"21847ab9-4438-4203-ba7e-de3231e5e3ba\" (UID: \"21847ab9-4438-4203-ba7e-de3231e5e3ba\") " Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.027425 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21847ab9-4438-4203-ba7e-de3231e5e3ba-logs" (OuterVolumeSpecName: "logs") pod "21847ab9-4438-4203-ba7e-de3231e5e3ba" (UID: "21847ab9-4438-4203-ba7e-de3231e5e3ba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.065516 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21847ab9-4438-4203-ba7e-de3231e5e3ba-kube-api-access-57wk5" (OuterVolumeSpecName: "kube-api-access-57wk5") pod "21847ab9-4438-4203-ba7e-de3231e5e3ba" (UID: "21847ab9-4438-4203-ba7e-de3231e5e3ba"). InnerVolumeSpecName "kube-api-access-57wk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.085414 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-config-data" (OuterVolumeSpecName: "config-data") pod "21847ab9-4438-4203-ba7e-de3231e5e3ba" (UID: "21847ab9-4438-4203-ba7e-de3231e5e3ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.115454 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21847ab9-4438-4203-ba7e-de3231e5e3ba" (UID: "21847ab9-4438-4203-ba7e-de3231e5e3ba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.123699 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "21847ab9-4438-4203-ba7e-de3231e5e3ba" (UID: "21847ab9-4438-4203-ba7e-de3231e5e3ba"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.127871 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.127885 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.127895 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57wk5\" (UniqueName: \"kubernetes.io/projected/21847ab9-4438-4203-ba7e-de3231e5e3ba-kube-api-access-57wk5\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.127903 4813 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21847ab9-4438-4203-ba7e-de3231e5e3ba-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.127910 4813 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21847ab9-4438-4203-ba7e-de3231e5e3ba-logs\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.175730 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"efa771aa-3427-4b7e-b8a8-775222785447","Type":"ContainerStarted","Data":"07026c79034e9e3fb1838cd07174ef0772748675341cfcdde88ea7485926a2fb"} Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.177428 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"efa771aa-3427-4b7e-b8a8-775222785447","Type":"ContainerStarted","Data":"05c5a1a14d476671a9e5ffbd6a0b2dbbd118407ae9d5fe1ea86e05acf6186541"} Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.178024 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e032c1c7-6f6c-4265-9320-0500b815ec64","Type":"ContainerStarted","Data":"2dc002273c0396ecbaaee9c751ec00a7461ea2ed1e0325e3e4d5a2e569be2280"} Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.182500 4813 generic.go:334] "Generic (PLEG): container finished" podID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerID="3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e" exitCode=0 Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.182552 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21847ab9-4438-4203-ba7e-de3231e5e3ba","Type":"ContainerDied","Data":"3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e"} Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.182576 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"21847ab9-4438-4203-ba7e-de3231e5e3ba","Type":"ContainerDied","Data":"8f178d168dfa4bae3575d39fc22e958df2a386315028b2dce134cb41bdc65968"} Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.182593 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.182599 4813 scope.go:117] "RemoveContainer" containerID="3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.198936 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.19891636 podStartE2EDuration="2.19891636s" podCreationTimestamp="2025-10-07 19:38:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:38:07.190729662 +0000 UTC m=+1213.268985273" watchObservedRunningTime="2025-10-07 19:38:07.19891636 +0000 UTC m=+1213.277171971" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.221062 4813 scope.go:117] "RemoveContainer" containerID="721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.228945 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.228925582 podStartE2EDuration="2.228925582s" podCreationTimestamp="2025-10-07 19:38:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:38:07.216650075 +0000 UTC m=+1213.294905686" watchObservedRunningTime="2025-10-07 19:38:07.228925582 +0000 UTC m=+1213.307181193" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.244647 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.253824 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.261763 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:38:07 crc kubenswrapper[4813]: E1007 19:38:07.262144 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-log" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.262161 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-log" Oct 07 19:38:07 crc kubenswrapper[4813]: E1007 19:38:07.262171 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-metadata" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.262178 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-metadata" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.262560 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-metadata" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.262583 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" containerName="nova-metadata-log" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.263558 4813 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.270912 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.271186 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.277931 4813 scope.go:117] "RemoveContainer" containerID="3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e" Oct 07 19:38:07 crc kubenswrapper[4813]: E1007 19:38:07.281417 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e\": container with ID starting with 3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e not found: ID does not exist" containerID="3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.281453 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e"} err="failed to get container status \"3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e\": rpc error: code = NotFound desc = could not find container \"3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e\": container with ID starting with 3774a90ed4dfd4282ae04ca872b1755e9a7ff6509232ff16a9616af7724cb31e not found: ID does not exist" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.281476 4813 scope.go:117] "RemoveContainer" containerID="721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327" Oct 07 19:38:07 crc kubenswrapper[4813]: E1007 19:38:07.283438 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327\": container with ID starting with 721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327 not found: ID does not exist" containerID="721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.283502 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.283499 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327"} err="failed to get container status \"721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327\": rpc error: code = NotFound desc = could not find container \"721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327\": container with ID starting with 721f296a58bde182ae7e2680621b448ce614073a1526503d485d4b4ea4761327 not found: ID does not exist" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.432971 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.433453 4813 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-config-data\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.433541 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.433763 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8ktj\" (UniqueName: \"kubernetes.io/projected/ee05e116-f577-4638-8c15-6fb6ff348eaf-kube-api-access-n8ktj\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.433851 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee05e116-f577-4638-8c15-6fb6ff348eaf-logs\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.535575 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8ktj\" (UniqueName: \"kubernetes.io/projected/ee05e116-f577-4638-8c15-6fb6ff348eaf-kube-api-access-n8ktj\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.535665 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee05e116-f577-4638-8c15-6fb6ff348eaf-logs\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.535703 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.535755 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-config-data\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.535808 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.536927 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee05e116-f577-4638-8c15-6fb6ff348eaf-logs\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " 
pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.544319 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-config-data\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.545709 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.545950 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee05e116-f577-4638-8c15-6fb6ff348eaf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.575832 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8ktj\" (UniqueName: \"kubernetes.io/projected/ee05e116-f577-4638-8c15-6fb6ff348eaf-kube-api-access-n8ktj\") pod \"nova-metadata-0\" (UID: \"ee05e116-f577-4638-8c15-6fb6ff348eaf\") " pod="openstack/nova-metadata-0" Oct 07 19:38:07 crc kubenswrapper[4813]: I1007 19:38:07.584484 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 07 19:38:08 crc kubenswrapper[4813]: I1007 19:38:08.020703 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 07 19:38:08 crc kubenswrapper[4813]: W1007 19:38:08.025446 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee05e116_f577_4638_8c15_6fb6ff348eaf.slice/crio-b7ce4e24386ea1ea0684b3027ec09f17c1b9cc9649e5c48176f7525ffc9305ce WatchSource:0}: Error finding container b7ce4e24386ea1ea0684b3027ec09f17c1b9cc9649e5c48176f7525ffc9305ce: Status 404 returned error can't find the container with id b7ce4e24386ea1ea0684b3027ec09f17c1b9cc9649e5c48176f7525ffc9305ce Oct 07 19:38:08 crc kubenswrapper[4813]: I1007 19:38:08.194126 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ee05e116-f577-4638-8c15-6fb6ff348eaf","Type":"ContainerStarted","Data":"b7ce4e24386ea1ea0684b3027ec09f17c1b9cc9649e5c48176f7525ffc9305ce"} Oct 07 19:38:08 crc kubenswrapper[4813]: I1007 19:38:08.616119 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21847ab9-4438-4203-ba7e-de3231e5e3ba" path="/var/lib/kubelet/pods/21847ab9-4438-4203-ba7e-de3231e5e3ba/volumes" Oct 07 19:38:09 crc kubenswrapper[4813]: I1007 19:38:09.207980 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ee05e116-f577-4638-8c15-6fb6ff348eaf","Type":"ContainerStarted","Data":"5a9e08c9a7b2ea31123c42fd9f38b48065ec7d3e70670409b51b62a4f656c701"} Oct 07 19:38:09 crc kubenswrapper[4813]: I1007 19:38:09.208286 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ee05e116-f577-4638-8c15-6fb6ff348eaf","Type":"ContainerStarted","Data":"7b71889743b9be7cdd7055cd154a101e30927f04322a91d4b4869e7e962a6b7d"} Oct 07 19:38:09 crc kubenswrapper[4813]: I1007 
19:38:09.229706 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.229688925 podStartE2EDuration="2.229688925s" podCreationTimestamp="2025-10-07 19:38:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:38:09.223013411 +0000 UTC m=+1215.301269062" watchObservedRunningTime="2025-10-07 19:38:09.229688925 +0000 UTC m=+1215.307944536" Oct 07 19:38:10 crc kubenswrapper[4813]: I1007 19:38:10.566629 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 07 19:38:12 crc kubenswrapper[4813]: I1007 19:38:12.585828 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 19:38:12 crc kubenswrapper[4813]: I1007 19:38:12.586239 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 07 19:38:15 crc kubenswrapper[4813]: I1007 19:38:15.542592 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 19:38:15 crc kubenswrapper[4813]: I1007 19:38:15.544255 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 07 19:38:15 crc kubenswrapper[4813]: I1007 19:38:15.566818 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 07 19:38:15 crc kubenswrapper[4813]: I1007 19:38:15.595396 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 07 19:38:16 crc kubenswrapper[4813]: I1007 19:38:16.296251 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 07 19:38:16 crc kubenswrapper[4813]: I1007 19:38:16.557567 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="efa771aa-3427-4b7e-b8a8-775222785447" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 19:38:16 crc kubenswrapper[4813]: I1007 19:38:16.557564 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="efa771aa-3427-4b7e-b8a8-775222785447" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 19:38:17 crc kubenswrapper[4813]: I1007 19:38:17.585377 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 19:38:17 crc kubenswrapper[4813]: I1007 19:38:17.585426 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 07 19:38:18 crc kubenswrapper[4813]: I1007 19:38:18.597614 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ee05e116-f577-4638-8c15-6fb6ff348eaf" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 19:38:18 crc kubenswrapper[4813]: I1007 19:38:18.597653 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ee05e116-f577-4638-8c15-6fb6ff348eaf" containerName="nova-metadata-log" probeResult="failure" 
output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 07 19:38:21 crc kubenswrapper[4813]: I1007 19:38:21.298663 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 07 19:38:25 crc kubenswrapper[4813]: I1007 19:38:25.551203 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 19:38:25 crc kubenswrapper[4813]: I1007 19:38:25.552098 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 19:38:25 crc kubenswrapper[4813]: I1007 19:38:25.553540 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 07 19:38:25 crc kubenswrapper[4813]: I1007 19:38:25.560304 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 19:38:26 crc kubenswrapper[4813]: I1007 19:38:26.372103 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 07 19:38:26 crc kubenswrapper[4813]: I1007 19:38:26.382228 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 07 19:38:27 crc kubenswrapper[4813]: I1007 19:38:27.593560 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 19:38:27 crc kubenswrapper[4813]: I1007 19:38:27.598450 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 07 19:38:27 crc kubenswrapper[4813]: I1007 19:38:27.606247 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 19:38:28 crc kubenswrapper[4813]: I1007 19:38:28.405233 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 07 19:38:37 crc kubenswrapper[4813]: I1007 19:38:37.448794 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:38:38 crc kubenswrapper[4813]: I1007 19:38:38.412099 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:38:42 crc kubenswrapper[4813]: I1007 19:38:42.324190 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerName="rabbitmq" containerID="cri-o://3f356796aac90f5d750514cac672a095f975cec17e70141ca0af6d3bcaa55118" gracePeriod=604796 Oct 07 19:38:42 crc kubenswrapper[4813]: I1007 19:38:42.711506 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerName="rabbitmq" containerID="cri-o://5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825" gracePeriod=604796 Oct 07 19:38:45 crc kubenswrapper[4813]: I1007 19:38:45.179137 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Oct 07 19:38:45 crc kubenswrapper[4813]: I1007 19:38:45.603954 4813 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: 
connect: connection refused" Oct 07 19:38:48 crc kubenswrapper[4813]: I1007 19:38:48.604621 4813 generic.go:334] "Generic (PLEG): container finished" podID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerID="3f356796aac90f5d750514cac672a095f975cec17e70141ca0af6d3bcaa55118" exitCode=0 Oct 07 19:38:48 crc kubenswrapper[4813]: I1007 19:38:48.612533 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b693f559-87e2-41ef-94c0-56d76bd9ef00","Type":"ContainerDied","Data":"3f356796aac90f5d750514cac672a095f975cec17e70141ca0af6d3bcaa55118"} Oct 07 19:38:48 crc kubenswrapper[4813]: I1007 19:38:48.995863 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.032058 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-plugins-conf\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.032111 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-confd\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.032173 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-plugins\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.032206 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b693f559-87e2-41ef-94c0-56d76bd9ef00-erlang-cookie-secret\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.032229 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72lft\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-kube-api-access-72lft\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.032284 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-erlang-cookie\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.032406 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b693f559-87e2-41ef-94c0-56d76bd9ef00-pod-info\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.032440 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-tls\") pod 
\"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.033119 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.033177 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.033229 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-config-data\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.033287 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-server-conf\") pod \"b693f559-87e2-41ef-94c0-56d76bd9ef00\" (UID: \"b693f559-87e2-41ef-94c0-56d76bd9ef00\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.033896 4813 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.035544 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.050671 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.057251 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b693f559-87e2-41ef-94c0-56d76bd9ef00-pod-info" (OuterVolumeSpecName: "pod-info") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.084314 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-kube-api-access-72lft" (OuterVolumeSpecName: "kube-api-access-72lft") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). 
InnerVolumeSpecName "kube-api-access-72lft". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.084504 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.085819 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b693f559-87e2-41ef-94c0-56d76bd9ef00-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.092252 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.137375 4813 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.137404 4813 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b693f559-87e2-41ef-94c0-56d76bd9ef00-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.137415 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72lft\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-kube-api-access-72lft\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.137428 4813 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.137437 4813 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b693f559-87e2-41ef-94c0-56d76bd9ef00-pod-info\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.137444 4813 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.137469 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.171919 4813 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 07 19:38:49 crc 
kubenswrapper[4813]: I1007 19:38:49.181300 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-server-conf" (OuterVolumeSpecName: "server-conf") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.184454 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-config-data" (OuterVolumeSpecName: "config-data") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.259745 4813 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.259800 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.259869 4813 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b693f559-87e2-41ef-94c0-56d76bd9ef00-server-conf\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.274458 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.359136 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "b693f559-87e2-41ef-94c0-56d76bd9ef00" (UID: "b693f559-87e2-41ef-94c0-56d76bd9ef00"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.360682 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-config-data\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.360726 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-plugins-conf\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.360779 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmg87\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-kube-api-access-lmg87\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.360820 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-plugins\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.360874 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-pod-info\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.360897 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.360942 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-erlang-cookie\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.361049 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-erlang-cookie-secret\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.361138 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-server-conf\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.361169 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-tls\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: 
\"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.361205 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-confd\") pod \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\" (UID: \"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49\") " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.361743 4813 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b693f559-87e2-41ef-94c0-56d76bd9ef00-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.362388 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.364643 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.366526 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.377479 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.377608 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-kube-api-access-lmg87" (OuterVolumeSpecName: "kube-api-access-lmg87") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "kube-api-access-lmg87". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.377660 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-pod-info" (OuterVolumeSpecName: "pod-info") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.377745 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.383374 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.412194 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-config-data" (OuterVolumeSpecName: "config-data") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.452725 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-server-conf" (OuterVolumeSpecName: "server-conf") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.462987 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmg87\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-kube-api-access-lmg87\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463025 4813 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463035 4813 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-pod-info\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463064 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463088 4813 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463100 4813 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463212 4813 reconciler_common.go:293] "Volume detached 
for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-server-conf\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463225 4813 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463233 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.463241 4813 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.482965 4813 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.531928 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" (UID: "f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.565000 4813 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.565035 4813 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.616953 4813 generic.go:334] "Generic (PLEG): container finished" podID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerID="5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825" exitCode=0 Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.616999 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49","Type":"ContainerDied","Data":"5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825"} Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.617062 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49","Type":"ContainerDied","Data":"3dabc2c7af752113205f6554fe9dab36531f22855b747048d63c02bbae9eab7a"} Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.617079 4813 scope.go:117] "RemoveContainer" containerID="5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.618084 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.624884 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b693f559-87e2-41ef-94c0-56d76bd9ef00","Type":"ContainerDied","Data":"fb89fefcb27d4c77968203ac8d8a97b710bbefa2f187c404c12edeb144ed380b"} Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.625108 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.645121 4813 scope.go:117] "RemoveContainer" containerID="50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.686141 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.695973 4813 scope.go:117] "RemoveContainer" containerID="5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825" Oct 07 19:38:49 crc kubenswrapper[4813]: E1007 19:38:49.702170 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825\": container with ID starting with 5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825 not found: ID does not exist" containerID="5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.702247 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825"} err="failed to get container status \"5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825\": rpc error: code = NotFound desc = could not find container \"5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825\": container with ID starting with 5fd34c410b1ce23b7fdc5d314c6ac3232b2d45fce3749ebb23ac7924a8cdf825 not found: ID does not exist" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.702302 4813 scope.go:117] "RemoveContainer" containerID="50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646" Oct 07 19:38:49 crc kubenswrapper[4813]: E1007 19:38:49.702716 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646\": container with ID starting with 50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646 not found: ID does not exist" containerID="50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.702766 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646"} err="failed to get container status \"50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646\": rpc error: code = NotFound desc = could not find container \"50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646\": container with ID starting with 50d9950e55b85704912ea2548c4461d7d318790078044b1687ad09134d443646 not found: ID does not exist" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.702784 4813 scope.go:117] "RemoveContainer" containerID="3f356796aac90f5d750514cac672a095f975cec17e70141ca0af6d3bcaa55118" Oct 07 19:38:49 
crc kubenswrapper[4813]: I1007 19:38:49.705598 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.724008 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.740603 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.750216 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:38:49 crc kubenswrapper[4813]: E1007 19:38:49.750640 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerName="setup-container" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.750656 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerName="setup-container" Oct 07 19:38:49 crc kubenswrapper[4813]: E1007 19:38:49.750667 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerName="rabbitmq" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.750673 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerName="rabbitmq" Oct 07 19:38:49 crc kubenswrapper[4813]: E1007 19:38:49.750684 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerName="setup-container" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.750692 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerName="setup-container" Oct 07 19:38:49 crc kubenswrapper[4813]: E1007 19:38:49.750715 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerName="rabbitmq" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.750721 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerName="rabbitmq" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.750882 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" containerName="rabbitmq" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.750894 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" containerName="rabbitmq" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.751930 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.752649 4813 scope.go:117] "RemoveContainer" containerID="98789f437cadf7459a98615391b7fd39c5b25d988dcb21b731b947d853a5d811" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.766485 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.768441 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.768833 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.769112 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-7pwf9" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.769236 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.769311 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.769653 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.769883 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.769449 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.777398 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.779087 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.779303 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.779494 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.779636 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.779739 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.779882 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4bdjx" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.779990 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.796818 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872047 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872106 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29nm9\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-kube-api-access-29nm9\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " 
pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872128 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d486108c-7921-4770-81bf-b309787cbf5a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872144 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872160 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-config-data\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872179 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872202 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872765 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2412d699-edb6-474b-95da-eb29d703dfd4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872824 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872847 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c44x5\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-kube-api-access-c44x5\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.872994 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " 
pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873052 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873129 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873149 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2412d699-edb6-474b-95da-eb29d703dfd4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873163 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873221 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873285 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d486108c-7921-4770-81bf-b309787cbf5a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873365 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873394 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 
crc kubenswrapper[4813]: I1007 19:38:49.873423 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.873439 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976089 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976245 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2412d699-edb6-474b-95da-eb29d703dfd4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976308 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976374 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c44x5\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-kube-api-access-c44x5\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976498 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976591 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976665 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976740 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/2412d699-edb6-474b-95da-eb29d703dfd4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976778 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976814 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976857 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976876 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.976894 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d486108c-7921-4770-81bf-b309787cbf5a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977114 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977198 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977261 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977372 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc 
kubenswrapper[4813]: I1007 19:38:49.977421 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977548 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29nm9\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-kube-api-access-29nm9\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977595 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d486108c-7921-4770-81bf-b309787cbf5a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977673 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977711 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-config-data\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977790 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977866 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.977930 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.978515 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.978720 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.978803 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.978831 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.979072 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.979450 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.980133 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d486108c-7921-4770-81bf-b309787cbf5a-config-data\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.980213 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.980742 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.981382 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.982599 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2412d699-edb6-474b-95da-eb29d703dfd4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 
19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.983929 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2412d699-edb6-474b-95da-eb29d703dfd4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.984726 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.986779 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d486108c-7921-4770-81bf-b309787cbf5a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.995089 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:49 crc kubenswrapper[4813]: I1007 19:38:49.995832 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2412d699-edb6-474b-95da-eb29d703dfd4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.000499 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d486108c-7921-4770-81bf-b309787cbf5a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.001101 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c44x5\" (UniqueName: \"kubernetes.io/projected/d486108c-7921-4770-81bf-b309787cbf5a-kube-api-access-c44x5\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.005070 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29nm9\" (UniqueName: \"kubernetes.io/projected/2412d699-edb6-474b-95da-eb29d703dfd4-kube-api-access-29nm9\") pod \"rabbitmq-cell1-server-0\" (UID: \"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.036016 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"d486108c-7921-4770-81bf-b309787cbf5a\") " pod="openstack/rabbitmq-server-0" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.051739 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"2412d699-edb6-474b-95da-eb29d703dfd4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.132223 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.143458 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.531228 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.615298 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b693f559-87e2-41ef-94c0-56d76bd9ef00" path="/var/lib/kubelet/pods/b693f559-87e2-41ef-94c0-56d76bd9ef00/volumes" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.616160 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49" path="/var/lib/kubelet/pods/f0a0c2fb-7867-45eb-9d70-8fc12a2dbf49/volumes" Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.636668 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d486108c-7921-4770-81bf-b309787cbf5a","Type":"ContainerStarted","Data":"642b4fe95c0664be7b5da569930dd0254ae998e1b8bdb87fefe63e387d87b190"} Oct 07 19:38:50 crc kubenswrapper[4813]: I1007 19:38:50.657617 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 07 19:38:50 crc kubenswrapper[4813]: W1007 19:38:50.667247 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2412d699_edb6_474b_95da_eb29d703dfd4.slice/crio-056290cb05d2af59fab4b0e9a7724c12d8307ee7aeb1e9a375b2354d0e1dd46c WatchSource:0}: Error finding container 056290cb05d2af59fab4b0e9a7724c12d8307ee7aeb1e9a375b2354d0e1dd46c: Status 404 returned error can't find the container with id 056290cb05d2af59fab4b0e9a7724c12d8307ee7aeb1e9a375b2354d0e1dd46c Oct 07 19:38:51 crc kubenswrapper[4813]: I1007 19:38:51.656708 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2412d699-edb6-474b-95da-eb29d703dfd4","Type":"ContainerStarted","Data":"056290cb05d2af59fab4b0e9a7724c12d8307ee7aeb1e9a375b2354d0e1dd46c"} Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.518115 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-dccvc"] Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.520783 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.523271 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.547620 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-dccvc"] Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.633639 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.633868 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.633983 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hm8j5\" (UniqueName: \"kubernetes.io/projected/8a31e5f4-fed5-466e-968a-1ec243b44d4c-kube-api-access-hm8j5\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.634077 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-config\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.634174 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-svc\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.634460 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.634525 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.668071 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"2412d699-edb6-474b-95da-eb29d703dfd4","Type":"ContainerStarted","Data":"7a0400be6e5103b9c42553a3f10190443bbb5c8fd46209c5a42c4d10ae36bf96"} Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.670990 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d486108c-7921-4770-81bf-b309787cbf5a","Type":"ContainerStarted","Data":"aab71615570d4d96279304915c9673fe0027c660b4998b18a2237ab7a5768dbd"} Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.736637 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.736706 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.736797 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hm8j5\" (UniqueName: \"kubernetes.io/projected/8a31e5f4-fed5-466e-968a-1ec243b44d4c-kube-api-access-hm8j5\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.736824 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-config\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.736858 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-svc\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.736975 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.737019 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.738338 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " 
pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.738435 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-svc\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.738475 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.738872 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.739012 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-config\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.739017 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.762626 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hm8j5\" (UniqueName: \"kubernetes.io/projected/8a31e5f4-fed5-466e-968a-1ec243b44d4c-kube-api-access-hm8j5\") pod \"dnsmasq-dns-67b789f86c-dccvc\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:52 crc kubenswrapper[4813]: I1007 19:38:52.839431 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:53 crc kubenswrapper[4813]: I1007 19:38:53.332339 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-dccvc"] Oct 07 19:38:53 crc kubenswrapper[4813]: I1007 19:38:53.680153 4813 generic.go:334] "Generic (PLEG): container finished" podID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" containerID="865a67b26bb514265558f7b9e701dc9cf65ab90d990f6b2349976b6713edc064" exitCode=0 Oct 07 19:38:53 crc kubenswrapper[4813]: I1007 19:38:53.680202 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" event={"ID":"8a31e5f4-fed5-466e-968a-1ec243b44d4c","Type":"ContainerDied","Data":"865a67b26bb514265558f7b9e701dc9cf65ab90d990f6b2349976b6713edc064"} Oct 07 19:38:53 crc kubenswrapper[4813]: I1007 19:38:53.680580 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" event={"ID":"8a31e5f4-fed5-466e-968a-1ec243b44d4c","Type":"ContainerStarted","Data":"0f006a9e2b9b229182df815ee62febe6b62723a319a27456f338a6ca23be2c46"} Oct 07 19:38:54 crc kubenswrapper[4813]: I1007 19:38:54.692617 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" event={"ID":"8a31e5f4-fed5-466e-968a-1ec243b44d4c","Type":"ContainerStarted","Data":"05f60abc9a3a724c847e8a650c7d96e9048f65ad5daf7ea283f074d8432c8a84"} Oct 07 19:38:54 crc kubenswrapper[4813]: I1007 19:38:54.693020 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:38:54 crc kubenswrapper[4813]: I1007 19:38:54.715985 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" podStartSLOduration=2.715970541 podStartE2EDuration="2.715970541s" podCreationTimestamp="2025-10-07 19:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:38:54.713261122 +0000 UTC m=+1260.791516733" watchObservedRunningTime="2025-10-07 19:38:54.715970541 +0000 UTC m=+1260.794226152" Oct 07 19:39:02 crc kubenswrapper[4813]: I1007 19:39:02.841512 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:39:02 crc kubenswrapper[4813]: I1007 19:39:02.922195 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mlwlg"] Oct 07 19:39:02 crc kubenswrapper[4813]: I1007 19:39:02.922496 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" podUID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" containerName="dnsmasq-dns" containerID="cri-o://672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57" gracePeriod=10 Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.101030 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc556cf6f-lzpcx"] Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.103292 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.124915 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc556cf6f-lzpcx"] Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.175155 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-openstack-edpm-ipam\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.175356 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-dns-swift-storage-0\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.175430 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.175678 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt7fr\" (UniqueName: \"kubernetes.io/projected/985bc25a-aeea-4538-bbfe-e2461641e594-kube-api-access-zt7fr\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.175884 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-dns-svc\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.175961 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-config\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.176034 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.280484 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt7fr\" (UniqueName: \"kubernetes.io/projected/985bc25a-aeea-4538-bbfe-e2461641e594-kube-api-access-zt7fr\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 
19:39:03.280572 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-dns-svc\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.280597 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-config\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.280623 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.280651 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-openstack-edpm-ipam\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.280685 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-dns-swift-storage-0\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.280705 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.281508 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-config\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.281675 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.281802 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-dns-svc\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.282135 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" 
(UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-openstack-edpm-ipam\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.282236 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-dns-swift-storage-0\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.283235 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/985bc25a-aeea-4538-bbfe-e2461641e594-ovsdbserver-nb\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.326083 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt7fr\" (UniqueName: \"kubernetes.io/projected/985bc25a-aeea-4538-bbfe-e2461641e594-kube-api-access-zt7fr\") pod \"dnsmasq-dns-6bc556cf6f-lzpcx\" (UID: \"985bc25a-aeea-4538-bbfe-e2461641e594\") " pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.419024 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.507245 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.588339 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-config\") pod \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.588412 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-swift-storage-0\") pod \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.588441 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-nb\") pod \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.588470 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-svc\") pod \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.588495 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-sb\") pod \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " Oct 07 19:39:03 crc 
kubenswrapper[4813]: I1007 19:39:03.588700 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc2ns\" (UniqueName: \"kubernetes.io/projected/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-kube-api-access-vc2ns\") pod \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\" (UID: \"bd8428c7-dcb9-48eb-977f-61453fbb4dc2\") " Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.600365 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-kube-api-access-vc2ns" (OuterVolumeSpecName: "kube-api-access-vc2ns") pod "bd8428c7-dcb9-48eb-977f-61453fbb4dc2" (UID: "bd8428c7-dcb9-48eb-977f-61453fbb4dc2"). InnerVolumeSpecName "kube-api-access-vc2ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.658309 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bd8428c7-dcb9-48eb-977f-61453fbb4dc2" (UID: "bd8428c7-dcb9-48eb-977f-61453fbb4dc2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.664545 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-config" (OuterVolumeSpecName: "config") pod "bd8428c7-dcb9-48eb-977f-61453fbb4dc2" (UID: "bd8428c7-dcb9-48eb-977f-61453fbb4dc2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.670837 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bd8428c7-dcb9-48eb-977f-61453fbb4dc2" (UID: "bd8428c7-dcb9-48eb-977f-61453fbb4dc2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.674947 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bd8428c7-dcb9-48eb-977f-61453fbb4dc2" (UID: "bd8428c7-dcb9-48eb-977f-61453fbb4dc2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.684113 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bd8428c7-dcb9-48eb-977f-61453fbb4dc2" (UID: "bd8428c7-dcb9-48eb-977f-61453fbb4dc2"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.690579 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc2ns\" (UniqueName: \"kubernetes.io/projected/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-kube-api-access-vc2ns\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.690602 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.690612 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.690620 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.690629 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.690636 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd8428c7-dcb9-48eb-977f-61453fbb4dc2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.804561 4813 generic.go:334] "Generic (PLEG): container finished" podID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" containerID="672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57" exitCode=0 Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.804594 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" event={"ID":"bd8428c7-dcb9-48eb-977f-61453fbb4dc2","Type":"ContainerDied","Data":"672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57"} Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.804643 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" event={"ID":"bd8428c7-dcb9-48eb-977f-61453fbb4dc2","Type":"ContainerDied","Data":"c9ae81428039940a46e04b2e17c564994726bd7cfcaaed62233aba7ff7713b48"} Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.804639 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-mlwlg" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.804691 4813 scope.go:117] "RemoveContainer" containerID="672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.839665 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mlwlg"] Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.841114 4813 scope.go:117] "RemoveContainer" containerID="784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.848540 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-mlwlg"] Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.881489 4813 scope.go:117] "RemoveContainer" containerID="672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57" Oct 07 19:39:03 crc kubenswrapper[4813]: E1007 19:39:03.886633 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57\": container with ID starting with 672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57 not found: ID does not exist" containerID="672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.886671 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57"} err="failed to get container status \"672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57\": rpc error: code = NotFound desc = could not find container \"672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57\": container with ID starting with 672092540a1dbbb188d1970d6bd4960b4d1b550299629cfc8ed53ae732a8fd57 not found: ID does not exist" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.886692 4813 scope.go:117] "RemoveContainer" containerID="784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790" Oct 07 19:39:03 crc kubenswrapper[4813]: E1007 19:39:03.886968 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790\": container with ID starting with 784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790 not found: ID does not exist" containerID="784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.886990 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790"} err="failed to get container status \"784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790\": rpc error: code = NotFound desc = could not find container \"784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790\": container with ID starting with 784bfc800ae42a9dc60a5722a234d2cc4b982753dc9e6dcaeb2d6836f0786790 not found: ID does not exist" Oct 07 19:39:03 crc kubenswrapper[4813]: I1007 19:39:03.897997 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc556cf6f-lzpcx"] Oct 07 19:39:03 crc kubenswrapper[4813]: W1007 19:39:03.906040 4813 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod985bc25a_aeea_4538_bbfe_e2461641e594.slice/crio-667217a034ed9f51b1f1744c1f8a09e776970c1f1d9568fffcf2f88470c4405b WatchSource:0}: Error finding container 667217a034ed9f51b1f1744c1f8a09e776970c1f1d9568fffcf2f88470c4405b: Status 404 returned error can't find the container with id 667217a034ed9f51b1f1744c1f8a09e776970c1f1d9568fffcf2f88470c4405b Oct 07 19:39:04 crc kubenswrapper[4813]: I1007 19:39:04.615065 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" path="/var/lib/kubelet/pods/bd8428c7-dcb9-48eb-977f-61453fbb4dc2/volumes" Oct 07 19:39:04 crc kubenswrapper[4813]: I1007 19:39:04.822399 4813 generic.go:334] "Generic (PLEG): container finished" podID="985bc25a-aeea-4538-bbfe-e2461641e594" containerID="402ac98902cc2fc5880c0ae2c40cd2f2506b50bb46eccf157ffb3b0b7a486fa9" exitCode=0 Oct 07 19:39:04 crc kubenswrapper[4813]: I1007 19:39:04.823485 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" event={"ID":"985bc25a-aeea-4538-bbfe-e2461641e594","Type":"ContainerDied","Data":"402ac98902cc2fc5880c0ae2c40cd2f2506b50bb46eccf157ffb3b0b7a486fa9"} Oct 07 19:39:04 crc kubenswrapper[4813]: I1007 19:39:04.823536 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" event={"ID":"985bc25a-aeea-4538-bbfe-e2461641e594","Type":"ContainerStarted","Data":"667217a034ed9f51b1f1744c1f8a09e776970c1f1d9568fffcf2f88470c4405b"} Oct 07 19:39:05 crc kubenswrapper[4813]: I1007 19:39:05.851467 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" event={"ID":"985bc25a-aeea-4538-bbfe-e2461641e594","Type":"ContainerStarted","Data":"14a557714975c8600a2e158c0450a35388dfdc89bed51113bf6f73139ca6c116"} Oct 07 19:39:05 crc kubenswrapper[4813]: I1007 19:39:05.852337 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:05 crc kubenswrapper[4813]: I1007 19:39:05.886608 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" podStartSLOduration=2.8865891 podStartE2EDuration="2.8865891s" podCreationTimestamp="2025-10-07 19:39:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:39:05.876153104 +0000 UTC m=+1271.954408715" watchObservedRunningTime="2025-10-07 19:39:05.8865891 +0000 UTC m=+1271.964844721" Oct 07 19:39:13 crc kubenswrapper[4813]: I1007 19:39:13.421588 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bc556cf6f-lzpcx" Oct 07 19:39:13 crc kubenswrapper[4813]: I1007 19:39:13.506966 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-dccvc"] Oct 07 19:39:13 crc kubenswrapper[4813]: I1007 19:39:13.507238 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" podUID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" containerName="dnsmasq-dns" containerID="cri-o://05f60abc9a3a724c847e8a650c7d96e9048f65ad5daf7ea283f074d8432c8a84" gracePeriod=10 Oct 07 19:39:13 crc kubenswrapper[4813]: I1007 19:39:13.930302 4813 generic.go:334] "Generic (PLEG): container finished" podID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" containerID="05f60abc9a3a724c847e8a650c7d96e9048f65ad5daf7ea283f074d8432c8a84" 
exitCode=0 Oct 07 19:39:13 crc kubenswrapper[4813]: I1007 19:39:13.930431 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" event={"ID":"8a31e5f4-fed5-466e-968a-1ec243b44d4c","Type":"ContainerDied","Data":"05f60abc9a3a724c847e8a650c7d96e9048f65ad5daf7ea283f074d8432c8a84"} Oct 07 19:39:13 crc kubenswrapper[4813]: I1007 19:39:13.930736 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" event={"ID":"8a31e5f4-fed5-466e-968a-1ec243b44d4c","Type":"ContainerDied","Data":"0f006a9e2b9b229182df815ee62febe6b62723a319a27456f338a6ca23be2c46"} Oct 07 19:39:13 crc kubenswrapper[4813]: I1007 19:39:13.930751 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f006a9e2b9b229182df815ee62febe6b62723a319a27456f338a6ca23be2c46" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.007008 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.104770 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hm8j5\" (UniqueName: \"kubernetes.io/projected/8a31e5f4-fed5-466e-968a-1ec243b44d4c-kube-api-access-hm8j5\") pod \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.104865 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-swift-storage-0\") pod \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.104898 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-openstack-edpm-ipam\") pod \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.104938 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-config\") pod \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.104982 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-nb\") pod \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.105029 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-svc\") pod \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.105182 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-sb\") pod \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\" (UID: \"8a31e5f4-fed5-466e-968a-1ec243b44d4c\") " Oct 07 19:39:14 crc 
kubenswrapper[4813]: I1007 19:39:14.111005 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a31e5f4-fed5-466e-968a-1ec243b44d4c-kube-api-access-hm8j5" (OuterVolumeSpecName: "kube-api-access-hm8j5") pod "8a31e5f4-fed5-466e-968a-1ec243b44d4c" (UID: "8a31e5f4-fed5-466e-968a-1ec243b44d4c"). InnerVolumeSpecName "kube-api-access-hm8j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.162309 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8a31e5f4-fed5-466e-968a-1ec243b44d4c" (UID: "8a31e5f4-fed5-466e-968a-1ec243b44d4c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.165005 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "8a31e5f4-fed5-466e-968a-1ec243b44d4c" (UID: "8a31e5f4-fed5-466e-968a-1ec243b44d4c"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.168856 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8a31e5f4-fed5-466e-968a-1ec243b44d4c" (UID: "8a31e5f4-fed5-466e-968a-1ec243b44d4c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.182399 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8a31e5f4-fed5-466e-968a-1ec243b44d4c" (UID: "8a31e5f4-fed5-466e-968a-1ec243b44d4c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.189634 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-config" (OuterVolumeSpecName: "config") pod "8a31e5f4-fed5-466e-968a-1ec243b44d4c" (UID: "8a31e5f4-fed5-466e-968a-1ec243b44d4c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.208003 4813 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.208235 4813 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-config\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.208363 4813 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.208455 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.208537 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hm8j5\" (UniqueName: \"kubernetes.io/projected/8a31e5f4-fed5-466e-968a-1ec243b44d4c-kube-api-access-hm8j5\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.208615 4813 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.217881 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8a31e5f4-fed5-466e-968a-1ec243b44d4c" (UID: "8a31e5f4-fed5-466e-968a-1ec243b44d4c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.310439 4813 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a31e5f4-fed5-466e-968a-1ec243b44d4c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.938236 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-dccvc" Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.969061 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-dccvc"] Oct 07 19:39:14 crc kubenswrapper[4813]: I1007 19:39:14.980647 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-dccvc"] Oct 07 19:39:16 crc kubenswrapper[4813]: I1007 19:39:16.630949 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" path="/var/lib/kubelet/pods/8a31e5f4-fed5-466e-968a-1ec243b44d4c/volumes" Oct 07 19:39:22 crc kubenswrapper[4813]: I1007 19:39:22.078989 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:39:22 crc kubenswrapper[4813]: I1007 19:39:22.079582 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:39:25 crc kubenswrapper[4813]: I1007 19:39:25.053257 4813 generic.go:334] "Generic (PLEG): container finished" podID="2412d699-edb6-474b-95da-eb29d703dfd4" containerID="7a0400be6e5103b9c42553a3f10190443bbb5c8fd46209c5a42c4d10ae36bf96" exitCode=0 Oct 07 19:39:25 crc kubenswrapper[4813]: I1007 19:39:25.053404 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2412d699-edb6-474b-95da-eb29d703dfd4","Type":"ContainerDied","Data":"7a0400be6e5103b9c42553a3f10190443bbb5c8fd46209c5a42c4d10ae36bf96"} Oct 07 19:39:25 crc kubenswrapper[4813]: I1007 19:39:25.073815 4813 generic.go:334] "Generic (PLEG): container finished" podID="d486108c-7921-4770-81bf-b309787cbf5a" containerID="aab71615570d4d96279304915c9673fe0027c660b4998b18a2237ab7a5768dbd" exitCode=0 Oct 07 19:39:25 crc kubenswrapper[4813]: I1007 19:39:25.074210 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d486108c-7921-4770-81bf-b309787cbf5a","Type":"ContainerDied","Data":"aab71615570d4d96279304915c9673fe0027c660b4998b18a2237ab7a5768dbd"} Oct 07 19:39:26 crc kubenswrapper[4813]: I1007 19:39:26.085882 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2412d699-edb6-474b-95da-eb29d703dfd4","Type":"ContainerStarted","Data":"dd86398f94ca8e05d6ed3ac17c7bb226bd0595b53dd6bf7e1c43bf537429de5d"} Oct 07 19:39:26 crc kubenswrapper[4813]: I1007 19:39:26.086402 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:39:26 crc kubenswrapper[4813]: I1007 19:39:26.088360 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d486108c-7921-4770-81bf-b309787cbf5a","Type":"ContainerStarted","Data":"5d655fbddd5cfa1ec3521c03a9c1de2b640ff1368fbc60fa0c752c0dcb0b22c5"} Oct 07 19:39:26 crc kubenswrapper[4813]: I1007 19:39:26.088550 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 07 19:39:26 crc kubenswrapper[4813]: I1007 19:39:26.109335 4813 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.109302021 podStartE2EDuration="37.109302021s" podCreationTimestamp="2025-10-07 19:38:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:39:26.105975144 +0000 UTC m=+1292.184230765" watchObservedRunningTime="2025-10-07 19:39:26.109302021 +0000 UTC m=+1292.187557632" Oct 07 19:39:26 crc kubenswrapper[4813]: I1007 19:39:26.138721 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.138702292 podStartE2EDuration="37.138702292s" podCreationTimestamp="2025-10-07 19:38:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 19:39:26.131511262 +0000 UTC m=+1292.209766873" watchObservedRunningTime="2025-10-07 19:39:26.138702292 +0000 UTC m=+1292.216957903" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.587191 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz"] Oct 07 19:39:37 crc kubenswrapper[4813]: E1007 19:39:37.588301 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" containerName="dnsmasq-dns" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.588342 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" containerName="dnsmasq-dns" Oct 07 19:39:37 crc kubenswrapper[4813]: E1007 19:39:37.588360 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" containerName="init" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.588368 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" containerName="init" Oct 07 19:39:37 crc kubenswrapper[4813]: E1007 19:39:37.588389 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" containerName="dnsmasq-dns" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.588399 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" containerName="dnsmasq-dns" Oct 07 19:39:37 crc kubenswrapper[4813]: E1007 19:39:37.588432 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" containerName="init" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.588439 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" containerName="init" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.588649 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd8428c7-dcb9-48eb-977f-61453fbb4dc2" containerName="dnsmasq-dns" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.588672 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a31e5f4-fed5-466e-968a-1ec243b44d4c" containerName="dnsmasq-dns" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.589506 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.593456 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.593884 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.593951 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.597980 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.611505 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz"] Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.681123 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.681234 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xsvr\" (UniqueName: \"kubernetes.io/projected/f558bb4b-742a-4c7d-bad0-ce2356b9765c-kube-api-access-8xsvr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.681367 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.681569 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.783265 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.783380 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-repo-setup-combined-ca-bundle\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.783449 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xsvr\" (UniqueName: \"kubernetes.io/projected/f558bb4b-742a-4c7d-bad0-ce2356b9765c-kube-api-access-8xsvr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.783510 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.790298 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.790972 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.791436 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.801834 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xsvr\" (UniqueName: \"kubernetes.io/projected/f558bb4b-742a-4c7d-bad0-ce2356b9765c-kube-api-access-8xsvr\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:37 crc kubenswrapper[4813]: I1007 19:39:37.914827 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:39:38 crc kubenswrapper[4813]: I1007 19:39:38.763733 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz"] Oct 07 19:39:39 crc kubenswrapper[4813]: I1007 19:39:39.231782 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" event={"ID":"f558bb4b-742a-4c7d-bad0-ce2356b9765c","Type":"ContainerStarted","Data":"e4b9db51f6517cf3e421e97a7c34e5cb74c9e228f7ee91fdb81626b7430c6a05"} Oct 07 19:39:40 crc kubenswrapper[4813]: I1007 19:39:40.138455 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 07 19:39:40 crc kubenswrapper[4813]: I1007 19:39:40.151485 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 07 19:39:49 crc kubenswrapper[4813]: I1007 19:39:49.344850 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" event={"ID":"f558bb4b-742a-4c7d-bad0-ce2356b9765c","Type":"ContainerStarted","Data":"57d06ece30c2928d5b4da7fde4d2ee70dffe4ead42d9ca20706a266543ca4b5c"} Oct 07 19:39:52 crc kubenswrapper[4813]: I1007 19:39:52.079478 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:39:52 crc kubenswrapper[4813]: I1007 19:39:52.079737 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:40:00 crc kubenswrapper[4813]: I1007 19:40:00.505996 4813 generic.go:334] "Generic (PLEG): container finished" podID="f558bb4b-742a-4c7d-bad0-ce2356b9765c" containerID="57d06ece30c2928d5b4da7fde4d2ee70dffe4ead42d9ca20706a266543ca4b5c" exitCode=0 Oct 07 19:40:00 crc kubenswrapper[4813]: I1007 19:40:00.506057 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" event={"ID":"f558bb4b-742a-4c7d-bad0-ce2356b9765c","Type":"ContainerDied","Data":"57d06ece30c2928d5b4da7fde4d2ee70dffe4ead42d9ca20706a266543ca4b5c"} Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.310068 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.462934 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xsvr\" (UniqueName: \"kubernetes.io/projected/f558bb4b-742a-4c7d-bad0-ce2356b9765c-kube-api-access-8xsvr\") pod \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.463266 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-inventory\") pod \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.463358 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-ssh-key\") pod \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.463435 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-repo-setup-combined-ca-bundle\") pod \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\" (UID: \"f558bb4b-742a-4c7d-bad0-ce2356b9765c\") " Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.470165 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f558bb4b-742a-4c7d-bad0-ce2356b9765c-kube-api-access-8xsvr" (OuterVolumeSpecName: "kube-api-access-8xsvr") pod "f558bb4b-742a-4c7d-bad0-ce2356b9765c" (UID: "f558bb4b-742a-4c7d-bad0-ce2356b9765c"). InnerVolumeSpecName "kube-api-access-8xsvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.470273 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f558bb4b-742a-4c7d-bad0-ce2356b9765c" (UID: "f558bb4b-742a-4c7d-bad0-ce2356b9765c"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.498513 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-inventory" (OuterVolumeSpecName: "inventory") pod "f558bb4b-742a-4c7d-bad0-ce2356b9765c" (UID: "f558bb4b-742a-4c7d-bad0-ce2356b9765c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.498900 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f558bb4b-742a-4c7d-bad0-ce2356b9765c" (UID: "f558bb4b-742a-4c7d-bad0-ce2356b9765c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.531387 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" event={"ID":"f558bb4b-742a-4c7d-bad0-ce2356b9765c","Type":"ContainerDied","Data":"e4b9db51f6517cf3e421e97a7c34e5cb74c9e228f7ee91fdb81626b7430c6a05"} Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.531431 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4b9db51f6517cf3e421e97a7c34e5cb74c9e228f7ee91fdb81626b7430c6a05" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.531490 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.565914 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xsvr\" (UniqueName: \"kubernetes.io/projected/f558bb4b-742a-4c7d-bad0-ce2356b9765c-kube-api-access-8xsvr\") on node \"crc\" DevicePath \"\"" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.565962 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.565976 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.565988 4813 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f558bb4b-742a-4c7d-bad0-ce2356b9765c-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.639731 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g"] Oct 07 19:40:02 crc kubenswrapper[4813]: E1007 19:40:02.640175 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f558bb4b-742a-4c7d-bad0-ce2356b9765c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.640198 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f558bb4b-742a-4c7d-bad0-ce2356b9765c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.640481 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f558bb4b-742a-4c7d-bad0-ce2356b9765c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.641271 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.644473 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.645003 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.645641 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.645868 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.661782 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g"] Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.770186 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.770300 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.770363 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vfss\" (UniqueName: \"kubernetes.io/projected/947e90ca-70e6-4956-a58b-06c3faf10445-kube-api-access-4vfss\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.872830 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.872883 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.872904 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vfss\" (UniqueName: \"kubernetes.io/projected/947e90ca-70e6-4956-a58b-06c3faf10445-kube-api-access-4vfss\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.876650 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.881052 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.888178 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vfss\" (UniqueName: \"kubernetes.io/projected/947e90ca-70e6-4956-a58b-06c3faf10445-kube-api-access-4vfss\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-tdk5g\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:02 crc kubenswrapper[4813]: I1007 19:40:02.959442 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:03 crc kubenswrapper[4813]: I1007 19:40:03.508042 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g"] Oct 07 19:40:03 crc kubenswrapper[4813]: W1007 19:40:03.513543 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod947e90ca_70e6_4956_a58b_06c3faf10445.slice/crio-78bbfbad9872c5c735d13c142cd9b9b9e4a5ce6bdf967de7cf1a64e4046a8ad5 WatchSource:0}: Error finding container 78bbfbad9872c5c735d13c142cd9b9b9e4a5ce6bdf967de7cf1a64e4046a8ad5: Status 404 returned error can't find the container with id 78bbfbad9872c5c735d13c142cd9b9b9e4a5ce6bdf967de7cf1a64e4046a8ad5 Oct 07 19:40:03 crc kubenswrapper[4813]: I1007 19:40:03.545510 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" event={"ID":"947e90ca-70e6-4956-a58b-06c3faf10445","Type":"ContainerStarted","Data":"78bbfbad9872c5c735d13c142cd9b9b9e4a5ce6bdf967de7cf1a64e4046a8ad5"} Oct 07 19:40:04 crc kubenswrapper[4813]: I1007 19:40:04.557814 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" event={"ID":"947e90ca-70e6-4956-a58b-06c3faf10445","Type":"ContainerStarted","Data":"d45cb89cb9b111c0db8d361e3a10b2d433dfd8ddaffe90e77fdb1f33a866070e"} Oct 07 19:40:04 crc kubenswrapper[4813]: I1007 19:40:04.582855 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" podStartSLOduration=1.973406447 podStartE2EDuration="2.582841252s" podCreationTimestamp="2025-10-07 19:40:02 +0000 UTC" firstStartedPulling="2025-10-07 19:40:03.518005073 +0000 UTC m=+1329.596260694" lastFinishedPulling="2025-10-07 19:40:04.127439878 +0000 UTC m=+1330.205695499" observedRunningTime="2025-10-07 19:40:04.581669446 +0000 UTC m=+1330.659925057" watchObservedRunningTime="2025-10-07 19:40:04.582841252 +0000 UTC 
m=+1330.661096863" Oct 07 19:40:06 crc kubenswrapper[4813]: I1007 19:40:06.980599 4813 scope.go:117] "RemoveContainer" containerID="b3ddda03f9d6d81d98bac55b702ac2918b76da0ab10ee0ff2e7fdca09cca212e" Oct 07 19:40:07 crc kubenswrapper[4813]: I1007 19:40:07.015392 4813 scope.go:117] "RemoveContainer" containerID="23a40ebcb20e53085f5983ca43357fa0f9d66710836a6e3c2f505e3bebc4f0f3" Oct 07 19:40:07 crc kubenswrapper[4813]: I1007 19:40:07.586842 4813 generic.go:334] "Generic (PLEG): container finished" podID="947e90ca-70e6-4956-a58b-06c3faf10445" containerID="d45cb89cb9b111c0db8d361e3a10b2d433dfd8ddaffe90e77fdb1f33a866070e" exitCode=0 Oct 07 19:40:07 crc kubenswrapper[4813]: I1007 19:40:07.586905 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" event={"ID":"947e90ca-70e6-4956-a58b-06c3faf10445","Type":"ContainerDied","Data":"d45cb89cb9b111c0db8d361e3a10b2d433dfd8ddaffe90e77fdb1f33a866070e"} Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.036469 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.088083 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vfss\" (UniqueName: \"kubernetes.io/projected/947e90ca-70e6-4956-a58b-06c3faf10445-kube-api-access-4vfss\") pod \"947e90ca-70e6-4956-a58b-06c3faf10445\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.088207 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-ssh-key\") pod \"947e90ca-70e6-4956-a58b-06c3faf10445\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.088245 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-inventory\") pod \"947e90ca-70e6-4956-a58b-06c3faf10445\" (UID: \"947e90ca-70e6-4956-a58b-06c3faf10445\") " Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.093877 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/947e90ca-70e6-4956-a58b-06c3faf10445-kube-api-access-4vfss" (OuterVolumeSpecName: "kube-api-access-4vfss") pod "947e90ca-70e6-4956-a58b-06c3faf10445" (UID: "947e90ca-70e6-4956-a58b-06c3faf10445"). InnerVolumeSpecName "kube-api-access-4vfss". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.124672 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-inventory" (OuterVolumeSpecName: "inventory") pod "947e90ca-70e6-4956-a58b-06c3faf10445" (UID: "947e90ca-70e6-4956-a58b-06c3faf10445"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.131488 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "947e90ca-70e6-4956-a58b-06c3faf10445" (UID: "947e90ca-70e6-4956-a58b-06c3faf10445"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.190515 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.190672 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/947e90ca-70e6-4956-a58b-06c3faf10445-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.190728 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vfss\" (UniqueName: \"kubernetes.io/projected/947e90ca-70e6-4956-a58b-06c3faf10445-kube-api-access-4vfss\") on node \"crc\" DevicePath \"\"" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.613282 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" event={"ID":"947e90ca-70e6-4956-a58b-06c3faf10445","Type":"ContainerDied","Data":"78bbfbad9872c5c735d13c142cd9b9b9e4a5ce6bdf967de7cf1a64e4046a8ad5"} Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.613635 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78bbfbad9872c5c735d13c142cd9b9b9e4a5ce6bdf967de7cf1a64e4046a8ad5" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.613711 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-tdk5g" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.714777 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92"] Oct 07 19:40:09 crc kubenswrapper[4813]: E1007 19:40:09.715154 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947e90ca-70e6-4956-a58b-06c3faf10445" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.715170 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="947e90ca-70e6-4956-a58b-06c3faf10445" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.715393 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="947e90ca-70e6-4956-a58b-06c3faf10445" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.715988 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.718296 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.719210 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.720456 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.720600 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.731581 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92"] Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.823288 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.823588 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.823662 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4xx5\" (UniqueName: \"kubernetes.io/projected/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-kube-api-access-d4xx5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.823789 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.925936 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.926026 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: 
\"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.926155 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.926179 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4xx5\" (UniqueName: \"kubernetes.io/projected/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-kube-api-access-d4xx5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.930994 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.931722 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.932730 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:09 crc kubenswrapper[4813]: I1007 19:40:09.946306 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4xx5\" (UniqueName: \"kubernetes.io/projected/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-kube-api-access-d4xx5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:10 crc kubenswrapper[4813]: I1007 19:40:10.034265 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:40:10 crc kubenswrapper[4813]: I1007 19:40:10.641301 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92"] Oct 07 19:40:10 crc kubenswrapper[4813]: W1007 19:40:10.644260 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf58a4cdc_b5b0_421f_bd28_6c46f3d99af3.slice/crio-42139f0113a681019456bd7d22ef6b17b67cc06205fe6d83ce332526fd9f37f7 WatchSource:0}: Error finding container 42139f0113a681019456bd7d22ef6b17b67cc06205fe6d83ce332526fd9f37f7: Status 404 returned error can't find the container with id 42139f0113a681019456bd7d22ef6b17b67cc06205fe6d83ce332526fd9f37f7 Oct 07 19:40:11 crc kubenswrapper[4813]: I1007 19:40:11.635676 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" event={"ID":"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3","Type":"ContainerStarted","Data":"ff9babf3669b93e5f33469c4e8f48a6e275d4880b16e1cff940733f9b32a332c"} Oct 07 19:40:11 crc kubenswrapper[4813]: I1007 19:40:11.636216 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" event={"ID":"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3","Type":"ContainerStarted","Data":"42139f0113a681019456bd7d22ef6b17b67cc06205fe6d83ce332526fd9f37f7"} Oct 07 19:40:11 crc kubenswrapper[4813]: I1007 19:40:11.660914 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" podStartSLOduration=2.2501508599999998 podStartE2EDuration="2.660892956s" podCreationTimestamp="2025-10-07 19:40:09 +0000 UTC" firstStartedPulling="2025-10-07 19:40:10.647955517 +0000 UTC m=+1336.726211148" lastFinishedPulling="2025-10-07 19:40:11.058697633 +0000 UTC m=+1337.136953244" observedRunningTime="2025-10-07 19:40:11.653976094 +0000 UTC m=+1337.732231715" watchObservedRunningTime="2025-10-07 19:40:11.660892956 +0000 UTC m=+1337.739148567" Oct 07 19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.078956 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.079678 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.079760 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.080589 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a168d0f1d4ea1589207def16c70ca26d39123d2f686ab970a58e0248c2c0905b"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 
19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.080650 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://a168d0f1d4ea1589207def16c70ca26d39123d2f686ab970a58e0248c2c0905b" gracePeriod=600 Oct 07 19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.743161 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="a168d0f1d4ea1589207def16c70ca26d39123d2f686ab970a58e0248c2c0905b" exitCode=0 Oct 07 19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.743254 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"a168d0f1d4ea1589207def16c70ca26d39123d2f686ab970a58e0248c2c0905b"} Oct 07 19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.744034 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617"} Oct 07 19:40:22 crc kubenswrapper[4813]: I1007 19:40:22.744064 4813 scope.go:117] "RemoveContainer" containerID="0f4da7fd23d52ded39cf69b0faa3801bac77bdff2643678a6b8540c579041a59" Oct 07 19:41:07 crc kubenswrapper[4813]: I1007 19:41:07.225843 4813 scope.go:117] "RemoveContainer" containerID="0842544f6e4f3273a498cd8fb11528a4821a5009b3ab2a12c9796bc10ff4b8f9" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.683669 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sx799"] Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.686974 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.710552 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx799"] Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.830935 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-catalog-content\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.830989 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-utilities\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.831015 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vdx9\" (UniqueName: \"kubernetes.io/projected/99f8357b-01a6-4055-8e51-dced8f057b13-kube-api-access-2vdx9\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.932702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-catalog-content\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.932793 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-utilities\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.932839 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vdx9\" (UniqueName: \"kubernetes.io/projected/99f8357b-01a6-4055-8e51-dced8f057b13-kube-api-access-2vdx9\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.933255 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-utilities\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.933397 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-catalog-content\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:34 crc kubenswrapper[4813]: I1007 19:41:34.961348 4813 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2vdx9\" (UniqueName: \"kubernetes.io/projected/99f8357b-01a6-4055-8e51-dced8f057b13-kube-api-access-2vdx9\") pod \"redhat-marketplace-sx799\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:35 crc kubenswrapper[4813]: I1007 19:41:35.007763 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:35 crc kubenswrapper[4813]: I1007 19:41:35.557182 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx799"] Oct 07 19:41:35 crc kubenswrapper[4813]: I1007 19:41:35.673664 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx799" event={"ID":"99f8357b-01a6-4055-8e51-dced8f057b13","Type":"ContainerStarted","Data":"75ffc4e4bbf5b041d2df8744ef117d60c191b151be4af22ed895f9ea9104c2e1"} Oct 07 19:41:36 crc kubenswrapper[4813]: I1007 19:41:36.685069 4813 generic.go:334] "Generic (PLEG): container finished" podID="99f8357b-01a6-4055-8e51-dced8f057b13" containerID="94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968" exitCode=0 Oct 07 19:41:36 crc kubenswrapper[4813]: I1007 19:41:36.685194 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx799" event={"ID":"99f8357b-01a6-4055-8e51-dced8f057b13","Type":"ContainerDied","Data":"94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968"} Oct 07 19:41:37 crc kubenswrapper[4813]: I1007 19:41:37.698309 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx799" event={"ID":"99f8357b-01a6-4055-8e51-dced8f057b13","Type":"ContainerStarted","Data":"f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33"} Oct 07 19:41:38 crc kubenswrapper[4813]: I1007 19:41:38.714678 4813 generic.go:334] "Generic (PLEG): container finished" podID="99f8357b-01a6-4055-8e51-dced8f057b13" containerID="f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33" exitCode=0 Oct 07 19:41:38 crc kubenswrapper[4813]: I1007 19:41:38.714760 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx799" event={"ID":"99f8357b-01a6-4055-8e51-dced8f057b13","Type":"ContainerDied","Data":"f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33"} Oct 07 19:41:39 crc kubenswrapper[4813]: I1007 19:41:39.727650 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx799" event={"ID":"99f8357b-01a6-4055-8e51-dced8f057b13","Type":"ContainerStarted","Data":"cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475"} Oct 07 19:41:39 crc kubenswrapper[4813]: I1007 19:41:39.754660 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sx799" podStartSLOduration=3.151587002 podStartE2EDuration="5.754640543s" podCreationTimestamp="2025-10-07 19:41:34 +0000 UTC" firstStartedPulling="2025-10-07 19:41:36.688209458 +0000 UTC m=+1422.766465069" lastFinishedPulling="2025-10-07 19:41:39.291262989 +0000 UTC m=+1425.369518610" observedRunningTime="2025-10-07 19:41:39.751064139 +0000 UTC m=+1425.829319790" watchObservedRunningTime="2025-10-07 19:41:39.754640543 +0000 UTC m=+1425.832896154" Oct 07 19:41:45 crc kubenswrapper[4813]: I1007 19:41:45.009755 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:45 crc kubenswrapper[4813]: I1007 19:41:45.010407 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:45 crc kubenswrapper[4813]: I1007 19:41:45.091290 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:45 crc kubenswrapper[4813]: I1007 19:41:45.863601 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:45 crc kubenswrapper[4813]: I1007 19:41:45.917737 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx799"] Oct 07 19:41:47 crc kubenswrapper[4813]: I1007 19:41:47.820300 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sx799" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" containerName="registry-server" containerID="cri-o://cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475" gracePeriod=2 Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.354332 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.436786 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vdx9\" (UniqueName: \"kubernetes.io/projected/99f8357b-01a6-4055-8e51-dced8f057b13-kube-api-access-2vdx9\") pod \"99f8357b-01a6-4055-8e51-dced8f057b13\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.436929 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-catalog-content\") pod \"99f8357b-01a6-4055-8e51-dced8f057b13\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.437003 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-utilities\") pod \"99f8357b-01a6-4055-8e51-dced8f057b13\" (UID: \"99f8357b-01a6-4055-8e51-dced8f057b13\") " Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.438673 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-utilities" (OuterVolumeSpecName: "utilities") pod "99f8357b-01a6-4055-8e51-dced8f057b13" (UID: "99f8357b-01a6-4055-8e51-dced8f057b13"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.444394 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99f8357b-01a6-4055-8e51-dced8f057b13-kube-api-access-2vdx9" (OuterVolumeSpecName: "kube-api-access-2vdx9") pod "99f8357b-01a6-4055-8e51-dced8f057b13" (UID: "99f8357b-01a6-4055-8e51-dced8f057b13"). InnerVolumeSpecName "kube-api-access-2vdx9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.461013 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99f8357b-01a6-4055-8e51-dced8f057b13" (UID: "99f8357b-01a6-4055-8e51-dced8f057b13"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.539600 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vdx9\" (UniqueName: \"kubernetes.io/projected/99f8357b-01a6-4055-8e51-dced8f057b13-kube-api-access-2vdx9\") on node \"crc\" DevicePath \"\"" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.539627 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.539636 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f8357b-01a6-4055-8e51-dced8f057b13-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.836086 4813 generic.go:334] "Generic (PLEG): container finished" podID="99f8357b-01a6-4055-8e51-dced8f057b13" containerID="cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475" exitCode=0 Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.836149 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sx799" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.836199 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx799" event={"ID":"99f8357b-01a6-4055-8e51-dced8f057b13","Type":"ContainerDied","Data":"cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475"} Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.836652 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sx799" event={"ID":"99f8357b-01a6-4055-8e51-dced8f057b13","Type":"ContainerDied","Data":"75ffc4e4bbf5b041d2df8744ef117d60c191b151be4af22ed895f9ea9104c2e1"} Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.836699 4813 scope.go:117] "RemoveContainer" containerID="cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.863812 4813 scope.go:117] "RemoveContainer" containerID="f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.878836 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx799"] Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.882592 4813 scope.go:117] "RemoveContainer" containerID="94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.890951 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sx799"] Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.942419 4813 scope.go:117] "RemoveContainer" containerID="cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475" Oct 07 19:41:48 crc kubenswrapper[4813]: E1007 19:41:48.942943 4813 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475\": container with ID starting with cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475 not found: ID does not exist" containerID="cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.942993 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475"} err="failed to get container status \"cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475\": rpc error: code = NotFound desc = could not find container \"cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475\": container with ID starting with cf3094369130b2684568cc0ee9ebda33b3bacc0284a798cb457324c3933fe475 not found: ID does not exist" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.943024 4813 scope.go:117] "RemoveContainer" containerID="f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33" Oct 07 19:41:48 crc kubenswrapper[4813]: E1007 19:41:48.943626 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33\": container with ID starting with f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33 not found: ID does not exist" containerID="f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.943655 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33"} err="failed to get container status \"f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33\": rpc error: code = NotFound desc = could not find container \"f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33\": container with ID starting with f9272f92f163434d85a12213505d2a8489394b28216fdbdf6a821ab86a9e0c33 not found: ID does not exist" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.943689 4813 scope.go:117] "RemoveContainer" containerID="94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968" Oct 07 19:41:48 crc kubenswrapper[4813]: E1007 19:41:48.943977 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968\": container with ID starting with 94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968 not found: ID does not exist" containerID="94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968" Oct 07 19:41:48 crc kubenswrapper[4813]: I1007 19:41:48.944002 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968"} err="failed to get container status \"94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968\": rpc error: code = NotFound desc = could not find container \"94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968\": container with ID starting with 94d71c090a093db8623fc3c859edc8afccf71bdabc16f9916590680860b85968 not found: ID does not exist" Oct 07 19:41:50 crc kubenswrapper[4813]: I1007 19:41:50.618659 4813 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" path="/var/lib/kubelet/pods/99f8357b-01a6-4055-8e51-dced8f057b13/volumes" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.403139 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qvqqc"] Oct 07 19:41:51 crc kubenswrapper[4813]: E1007 19:41:51.404359 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" containerName="extract-content" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.404391 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" containerName="extract-content" Oct 07 19:41:51 crc kubenswrapper[4813]: E1007 19:41:51.404441 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" containerName="registry-server" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.404454 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" containerName="registry-server" Oct 07 19:41:51 crc kubenswrapper[4813]: E1007 19:41:51.404489 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" containerName="extract-utilities" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.404502 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" containerName="extract-utilities" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.404853 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="99f8357b-01a6-4055-8e51-dced8f057b13" containerName="registry-server" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.407059 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.416121 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qvqqc"] Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.511048 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm2vm\" (UniqueName: \"kubernetes.io/projected/68c46681-bc89-422a-ab79-5fe177f65708-kube-api-access-dm2vm\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.511145 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-utilities\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.511265 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-catalog-content\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.612740 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-utilities\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.612787 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-catalog-content\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.612908 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm2vm\" (UniqueName: \"kubernetes.io/projected/68c46681-bc89-422a-ab79-5fe177f65708-kube-api-access-dm2vm\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.613254 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-utilities\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.613441 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-catalog-content\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.632462 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dm2vm\" (UniqueName: \"kubernetes.io/projected/68c46681-bc89-422a-ab79-5fe177f65708-kube-api-access-dm2vm\") pod \"community-operators-qvqqc\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:51 crc kubenswrapper[4813]: I1007 19:41:51.727133 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:41:52 crc kubenswrapper[4813]: I1007 19:41:52.182295 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qvqqc"] Oct 07 19:41:52 crc kubenswrapper[4813]: I1007 19:41:52.895270 4813 generic.go:334] "Generic (PLEG): container finished" podID="68c46681-bc89-422a-ab79-5fe177f65708" containerID="63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a" exitCode=0 Oct 07 19:41:52 crc kubenswrapper[4813]: I1007 19:41:52.895385 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qvqqc" event={"ID":"68c46681-bc89-422a-ab79-5fe177f65708","Type":"ContainerDied","Data":"63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a"} Oct 07 19:41:52 crc kubenswrapper[4813]: I1007 19:41:52.895434 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qvqqc" event={"ID":"68c46681-bc89-422a-ab79-5fe177f65708","Type":"ContainerStarted","Data":"5e9953e604de3269bdf4fab0b364a8f524a3b033350078fc1c45afe9d6819e2c"} Oct 07 19:41:53 crc kubenswrapper[4813]: I1007 19:41:53.907206 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qvqqc" event={"ID":"68c46681-bc89-422a-ab79-5fe177f65708","Type":"ContainerStarted","Data":"4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883"} Oct 07 19:41:55 crc kubenswrapper[4813]: I1007 19:41:55.929279 4813 generic.go:334] "Generic (PLEG): container finished" podID="68c46681-bc89-422a-ab79-5fe177f65708" containerID="4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883" exitCode=0 Oct 07 19:41:55 crc kubenswrapper[4813]: I1007 19:41:55.929395 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qvqqc" event={"ID":"68c46681-bc89-422a-ab79-5fe177f65708","Type":"ContainerDied","Data":"4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883"} Oct 07 19:41:56 crc kubenswrapper[4813]: I1007 19:41:56.943249 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qvqqc" event={"ID":"68c46681-bc89-422a-ab79-5fe177f65708","Type":"ContainerStarted","Data":"e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6"} Oct 07 19:41:56 crc kubenswrapper[4813]: I1007 19:41:56.971998 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qvqqc" podStartSLOduration=2.491633479 podStartE2EDuration="5.971977789s" podCreationTimestamp="2025-10-07 19:41:51 +0000 UTC" firstStartedPulling="2025-10-07 19:41:52.897457186 +0000 UTC m=+1438.975712837" lastFinishedPulling="2025-10-07 19:41:56.377801526 +0000 UTC m=+1442.456057147" observedRunningTime="2025-10-07 19:41:56.960580207 +0000 UTC m=+1443.038835818" watchObservedRunningTime="2025-10-07 19:41:56.971977789 +0000 UTC m=+1443.050233400" Oct 07 19:42:01 crc kubenswrapper[4813]: I1007 19:42:01.728276 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:42:01 crc kubenswrapper[4813]: I1007 19:42:01.728948 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:42:01 crc kubenswrapper[4813]: I1007 19:42:01.790032 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:42:02 crc kubenswrapper[4813]: I1007 19:42:02.090951 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:42:02 crc kubenswrapper[4813]: I1007 19:42:02.163383 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qvqqc"] Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.034194 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qvqqc" podUID="68c46681-bc89-422a-ab79-5fe177f65708" containerName="registry-server" containerID="cri-o://e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6" gracePeriod=2 Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.483944 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.660488 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm2vm\" (UniqueName: \"kubernetes.io/projected/68c46681-bc89-422a-ab79-5fe177f65708-kube-api-access-dm2vm\") pod \"68c46681-bc89-422a-ab79-5fe177f65708\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.660840 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-catalog-content\") pod \"68c46681-bc89-422a-ab79-5fe177f65708\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.660937 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-utilities\") pod \"68c46681-bc89-422a-ab79-5fe177f65708\" (UID: \"68c46681-bc89-422a-ab79-5fe177f65708\") " Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.662083 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-utilities" (OuterVolumeSpecName: "utilities") pod "68c46681-bc89-422a-ab79-5fe177f65708" (UID: "68c46681-bc89-422a-ab79-5fe177f65708"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.662268 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.666603 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68c46681-bc89-422a-ab79-5fe177f65708-kube-api-access-dm2vm" (OuterVolumeSpecName: "kube-api-access-dm2vm") pod "68c46681-bc89-422a-ab79-5fe177f65708" (UID: "68c46681-bc89-422a-ab79-5fe177f65708"). InnerVolumeSpecName "kube-api-access-dm2vm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.704959 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "68c46681-bc89-422a-ab79-5fe177f65708" (UID: "68c46681-bc89-422a-ab79-5fe177f65708"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.765017 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm2vm\" (UniqueName: \"kubernetes.io/projected/68c46681-bc89-422a-ab79-5fe177f65708-kube-api-access-dm2vm\") on node \"crc\" DevicePath \"\"" Oct 07 19:42:04 crc kubenswrapper[4813]: I1007 19:42:04.765045 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68c46681-bc89-422a-ab79-5fe177f65708-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.046679 4813 generic.go:334] "Generic (PLEG): container finished" podID="68c46681-bc89-422a-ab79-5fe177f65708" containerID="e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6" exitCode=0 Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.046735 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qvqqc" event={"ID":"68c46681-bc89-422a-ab79-5fe177f65708","Type":"ContainerDied","Data":"e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6"} Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.046764 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qvqqc" event={"ID":"68c46681-bc89-422a-ab79-5fe177f65708","Type":"ContainerDied","Data":"5e9953e604de3269bdf4fab0b364a8f524a3b033350078fc1c45afe9d6819e2c"} Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.046785 4813 scope.go:117] "RemoveContainer" containerID="e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.046890 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qvqqc" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.070768 4813 scope.go:117] "RemoveContainer" containerID="4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.122470 4813 scope.go:117] "RemoveContainer" containerID="63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.127788 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qvqqc"] Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.142643 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qvqqc"] Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.192638 4813 scope.go:117] "RemoveContainer" containerID="e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6" Oct 07 19:42:05 crc kubenswrapper[4813]: E1007 19:42:05.193422 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6\": container with ID starting with e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6 not found: ID does not exist" containerID="e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.193455 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6"} err="failed to get container status \"e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6\": rpc error: code = NotFound desc = could not find container \"e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6\": container with ID starting with e3b68c3e4e7c23c70ee9dc5772e964bd9d5bba0cf4560c3505d22cf79d3344a6 not found: ID does not exist" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.193480 4813 scope.go:117] "RemoveContainer" containerID="4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883" Oct 07 19:42:05 crc kubenswrapper[4813]: E1007 19:42:05.195392 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883\": container with ID starting with 4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883 not found: ID does not exist" containerID="4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.195414 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883"} err="failed to get container status \"4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883\": rpc error: code = NotFound desc = could not find container \"4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883\": container with ID starting with 4c332053807ec1f0d2c51d5044ca513d0f32b4865d413d6c5cbfbe35b75ba883 not found: ID does not exist" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.195430 4813 scope.go:117] "RemoveContainer" containerID="63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a" Oct 07 19:42:05 crc kubenswrapper[4813]: E1007 19:42:05.195812 4813 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a\": container with ID starting with 63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a not found: ID does not exist" containerID="63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a" Oct 07 19:42:05 crc kubenswrapper[4813]: I1007 19:42:05.195831 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a"} err="failed to get container status \"63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a\": rpc error: code = NotFound desc = could not find container \"63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a\": container with ID starting with 63d3f22aeb66fdb51616e22fffa4c3666ec009f6044b63bfdf6e9f505df8f96a not found: ID does not exist" Oct 07 19:42:06 crc kubenswrapper[4813]: I1007 19:42:06.619520 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68c46681-bc89-422a-ab79-5fe177f65708" path="/var/lib/kubelet/pods/68c46681-bc89-422a-ab79-5fe177f65708/volumes" Oct 07 19:42:22 crc kubenswrapper[4813]: I1007 19:42:22.079074 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:42:22 crc kubenswrapper[4813]: I1007 19:42:22.079799 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:42:52 crc kubenswrapper[4813]: I1007 19:42:52.078545 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:42:52 crc kubenswrapper[4813]: I1007 19:42:52.079295 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:43:08 crc kubenswrapper[4813]: I1007 19:43:08.895489 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wf8jb"] Oct 07 19:43:08 crc kubenswrapper[4813]: E1007 19:43:08.896271 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68c46681-bc89-422a-ab79-5fe177f65708" containerName="registry-server" Oct 07 19:43:08 crc kubenswrapper[4813]: I1007 19:43:08.896283 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="68c46681-bc89-422a-ab79-5fe177f65708" containerName="registry-server" Oct 07 19:43:08 crc kubenswrapper[4813]: E1007 19:43:08.896298 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68c46681-bc89-422a-ab79-5fe177f65708" containerName="extract-content" Oct 07 19:43:08 crc kubenswrapper[4813]: I1007 19:43:08.896304 4813 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="68c46681-bc89-422a-ab79-5fe177f65708" containerName="extract-content" Oct 07 19:43:08 crc kubenswrapper[4813]: E1007 19:43:08.896388 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68c46681-bc89-422a-ab79-5fe177f65708" containerName="extract-utilities" Oct 07 19:43:08 crc kubenswrapper[4813]: I1007 19:43:08.896396 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="68c46681-bc89-422a-ab79-5fe177f65708" containerName="extract-utilities" Oct 07 19:43:08 crc kubenswrapper[4813]: I1007 19:43:08.896586 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="68c46681-bc89-422a-ab79-5fe177f65708" containerName="registry-server" Oct 07 19:43:08 crc kubenswrapper[4813]: I1007 19:43:08.897880 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:08 crc kubenswrapper[4813]: I1007 19:43:08.919591 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wf8jb"] Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.055535 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh9kd\" (UniqueName: \"kubernetes.io/projected/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-kube-api-access-bh9kd\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.055595 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-utilities\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.055619 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-catalog-content\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.158373 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-utilities\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.158417 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-catalog-content\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.158551 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh9kd\" (UniqueName: \"kubernetes.io/projected/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-kube-api-access-bh9kd\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.159242 4813 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-utilities\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.159278 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-catalog-content\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.183041 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh9kd\" (UniqueName: \"kubernetes.io/projected/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-kube-api-access-bh9kd\") pod \"redhat-operators-wf8jb\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.232891 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.700506 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wf8jb"] Oct 07 19:43:09 crc kubenswrapper[4813]: I1007 19:43:09.792622 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wf8jb" event={"ID":"4d14a760-1e5c-4ee2-8a42-5fe44050a52a","Type":"ContainerStarted","Data":"6238f071fef1ecc487bb1b4d2711732a579115bc02988c64c447068ada9fcca6"} Oct 07 19:43:10 crc kubenswrapper[4813]: I1007 19:43:10.804024 4813 generic.go:334] "Generic (PLEG): container finished" podID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerID="8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd" exitCode=0 Oct 07 19:43:10 crc kubenswrapper[4813]: I1007 19:43:10.804124 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wf8jb" event={"ID":"4d14a760-1e5c-4ee2-8a42-5fe44050a52a","Type":"ContainerDied","Data":"8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd"} Oct 07 19:43:10 crc kubenswrapper[4813]: I1007 19:43:10.807445 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 19:43:12 crc kubenswrapper[4813]: I1007 19:43:12.829358 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wf8jb" event={"ID":"4d14a760-1e5c-4ee2-8a42-5fe44050a52a","Type":"ContainerStarted","Data":"7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e"} Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.550357 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hsdp7"] Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.554230 4813 util.go:30] "No sandbox for pod can be found. 
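
[Annotation] The repeated VerifyControllerAttachedVolume / MountVolume started / MountVolume.SetUp succeeded triples above are the kubelet volume reconciler converging actual state toward the desired state for each pod volume. Below is a minimal Go sketch of that desired-versus-mounted comparison; the types and names are illustrative stand-ins, not kubelet's reconciler_common API.

package main

import "fmt"

// reconcile compares the desired volume set against what is actually
// mounted and returns the mounts and unmounts needed to converge,
// mirroring the reconciler pattern visible in the log above.
func reconcile(desired, mounted map[string]bool) (toMount, toUnmount []string) {
    for name := range desired {
        if !mounted[name] {
            toMount = append(toMount, name) // "MountVolume started"
        }
    }
    for name := range mounted {
        if !desired[name] {
            toUnmount = append(toUnmount, name) // "UnmountVolume started"
        }
    }
    return toMount, toUnmount
}

func main() {
    desired := map[string]bool{"utilities": true, "catalog-content": true, "kube-api-access-bh9kd": true}
    mounted := map[string]bool{"utilities": true}
    m, u := reconcile(desired, mounted)
    fmt.Println("mount:", m, "unmount:", u)
}
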
Need to start a new one" pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.568112 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsdp7"] Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.631959 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-catalog-content\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.632161 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vsl7\" (UniqueName: \"kubernetes.io/projected/14c5bcae-9598-411c-8a15-56302ff3bb35-kube-api-access-8vsl7\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.632196 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-utilities\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.734443 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-catalog-content\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.734654 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vsl7\" (UniqueName: \"kubernetes.io/projected/14c5bcae-9598-411c-8a15-56302ff3bb35-kube-api-access-8vsl7\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.734684 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-utilities\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.735239 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-utilities\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.735734 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-catalog-content\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.759482 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8vsl7\" (UniqueName: \"kubernetes.io/projected/14c5bcae-9598-411c-8a15-56302ff3bb35-kube-api-access-8vsl7\") pod \"certified-operators-hsdp7\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:21 crc kubenswrapper[4813]: I1007 19:43:21.873745 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.078915 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.079216 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.079264 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.080344 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.080398 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" gracePeriod=600 Oct 07 19:43:22 crc kubenswrapper[4813]: E1007 19:43:22.219127 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.355094 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsdp7"] Oct 07 19:43:22 crc kubenswrapper[4813]: W1007 19:43:22.375095 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14c5bcae_9598_411c_8a15_56302ff3bb35.slice/crio-86f1a50d1da42b7542ee0777026ffb785bc5a633fd229a74a289be76649bb0ff WatchSource:0}: Error finding container 86f1a50d1da42b7542ee0777026ffb785bc5a633fd229a74a289be76649bb0ff: Status 404 returned error can't find the container with id 86f1a50d1da42b7542ee0777026ffb785bc5a633fd229a74a289be76649bb0ff Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.947240 4813 
generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" exitCode=0 Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.947345 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617"} Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.947728 4813 scope.go:117] "RemoveContainer" containerID="a168d0f1d4ea1589207def16c70ca26d39123d2f686ab970a58e0248c2c0905b" Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.948761 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:43:22 crc kubenswrapper[4813]: E1007 19:43:22.949173 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.953720 4813 generic.go:334] "Generic (PLEG): container finished" podID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerID="7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e" exitCode=0 Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.953797 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wf8jb" event={"ID":"4d14a760-1e5c-4ee2-8a42-5fe44050a52a","Type":"ContainerDied","Data":"7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e"} Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.955756 4813 generic.go:334] "Generic (PLEG): container finished" podID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerID="94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999" exitCode=0 Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.955821 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdp7" event={"ID":"14c5bcae-9598-411c-8a15-56302ff3bb35","Type":"ContainerDied","Data":"94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999"} Oct 07 19:43:22 crc kubenswrapper[4813]: I1007 19:43:22.955907 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdp7" event={"ID":"14c5bcae-9598-411c-8a15-56302ff3bb35","Type":"ContainerStarted","Data":"86f1a50d1da42b7542ee0777026ffb785bc5a633fd229a74a289be76649bb0ff"} Oct 07 19:43:23 crc kubenswrapper[4813]: I1007 19:43:23.969286 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wf8jb" event={"ID":"4d14a760-1e5c-4ee2-8a42-5fe44050a52a","Type":"ContainerStarted","Data":"d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852"} Oct 07 19:43:23 crc kubenswrapper[4813]: I1007 19:43:23.972717 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdp7" event={"ID":"14c5bcae-9598-411c-8a15-56302ff3bb35","Type":"ContainerStarted","Data":"2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1"} Oct 07 19:43:23 crc kubenswrapper[4813]: I1007 
19:43:23.991469 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wf8jb" podStartSLOduration=3.388250856 podStartE2EDuration="15.99145414s" podCreationTimestamp="2025-10-07 19:43:08 +0000 UTC" firstStartedPulling="2025-10-07 19:43:10.807147187 +0000 UTC m=+1516.885402828" lastFinishedPulling="2025-10-07 19:43:23.410350501 +0000 UTC m=+1529.488606112" observedRunningTime="2025-10-07 19:43:23.987848945 +0000 UTC m=+1530.066104556" watchObservedRunningTime="2025-10-07 19:43:23.99145414 +0000 UTC m=+1530.069709751" Oct 07 19:43:25 crc kubenswrapper[4813]: I1007 19:43:25.994000 4813 generic.go:334] "Generic (PLEG): container finished" podID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerID="2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1" exitCode=0 Oct 07 19:43:25 crc kubenswrapper[4813]: I1007 19:43:25.994099 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdp7" event={"ID":"14c5bcae-9598-411c-8a15-56302ff3bb35","Type":"ContainerDied","Data":"2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1"} Oct 07 19:43:27 crc kubenswrapper[4813]: I1007 19:43:27.007185 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdp7" event={"ID":"14c5bcae-9598-411c-8a15-56302ff3bb35","Type":"ContainerStarted","Data":"53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8"} Oct 07 19:43:27 crc kubenswrapper[4813]: I1007 19:43:27.036132 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hsdp7" podStartSLOduration=2.510596725 podStartE2EDuration="6.036110363s" podCreationTimestamp="2025-10-07 19:43:21 +0000 UTC" firstStartedPulling="2025-10-07 19:43:22.957963876 +0000 UTC m=+1529.036219477" lastFinishedPulling="2025-10-07 19:43:26.483477494 +0000 UTC m=+1532.561733115" observedRunningTime="2025-10-07 19:43:27.026290247 +0000 UTC m=+1533.104545878" watchObservedRunningTime="2025-10-07 19:43:27.036110363 +0000 UTC m=+1533.114365984" Oct 07 19:43:29 crc kubenswrapper[4813]: I1007 19:43:29.233597 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:29 crc kubenswrapper[4813]: I1007 19:43:29.234971 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:30 crc kubenswrapper[4813]: I1007 19:43:30.285531 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wf8jb" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="registry-server" probeResult="failure" output=< Oct 07 19:43:30 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:43:30 crc kubenswrapper[4813]: > Oct 07 19:43:31 crc kubenswrapper[4813]: I1007 19:43:31.875200 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:31 crc kubenswrapper[4813]: I1007 19:43:31.875570 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:32 crc kubenswrapper[4813]: I1007 19:43:32.942704 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-hsdp7" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="registry-server" 
probeResult="failure" output=< Oct 07 19:43:32 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:43:32 crc kubenswrapper[4813]: > Oct 07 19:43:33 crc kubenswrapper[4813]: I1007 19:43:33.050094 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-sbds4"] Oct 07 19:43:33 crc kubenswrapper[4813]: I1007 19:43:33.080172 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-4g6mq"] Oct 07 19:43:33 crc kubenswrapper[4813]: I1007 19:43:33.091532 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-4g6mq"] Oct 07 19:43:33 crc kubenswrapper[4813]: I1007 19:43:33.101789 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-sbds4"] Oct 07 19:43:34 crc kubenswrapper[4813]: I1007 19:43:34.616377 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97db72c2-132d-4b61-afb2-e65936b1352d" path="/var/lib/kubelet/pods/97db72c2-132d-4b61-afb2-e65936b1352d/volumes" Oct 07 19:43:34 crc kubenswrapper[4813]: I1007 19:43:34.617354 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99efc153-28b9-4f27-86a3-b8a913bc66df" path="/var/lib/kubelet/pods/99efc153-28b9-4f27-86a3-b8a913bc66df/volumes" Oct 07 19:43:35 crc kubenswrapper[4813]: I1007 19:43:35.605270 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:43:35 crc kubenswrapper[4813]: E1007 19:43:35.605842 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:43:40 crc kubenswrapper[4813]: I1007 19:43:40.162505 4813 generic.go:334] "Generic (PLEG): container finished" podID="f58a4cdc-b5b0-421f-bd28-6c46f3d99af3" containerID="ff9babf3669b93e5f33469c4e8f48a6e275d4880b16e1cff940733f9b32a332c" exitCode=0 Oct 07 19:43:40 crc kubenswrapper[4813]: I1007 19:43:40.162793 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" event={"ID":"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3","Type":"ContainerDied","Data":"ff9babf3669b93e5f33469c4e8f48a6e275d4880b16e1cff940733f9b32a332c"} Oct 07 19:43:40 crc kubenswrapper[4813]: I1007 19:43:40.286497 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wf8jb" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="registry-server" probeResult="failure" output=< Oct 07 19:43:40 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:43:40 crc kubenswrapper[4813]: > Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.035348 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-kgrtz"] Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.047112 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-kgrtz"] Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.636091 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.751952 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-ssh-key\") pod \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.752091 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4xx5\" (UniqueName: \"kubernetes.io/projected/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-kube-api-access-d4xx5\") pod \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.752205 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-bootstrap-combined-ca-bundle\") pod \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.752840 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-inventory\") pod \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\" (UID: \"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3\") " Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.761054 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-kube-api-access-d4xx5" (OuterVolumeSpecName: "kube-api-access-d4xx5") pod "f58a4cdc-b5b0-421f-bd28-6c46f3d99af3" (UID: "f58a4cdc-b5b0-421f-bd28-6c46f3d99af3"). InnerVolumeSpecName "kube-api-access-d4xx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.761470 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f58a4cdc-b5b0-421f-bd28-6c46f3d99af3" (UID: "f58a4cdc-b5b0-421f-bd28-6c46f3d99af3"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.785875 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-inventory" (OuterVolumeSpecName: "inventory") pod "f58a4cdc-b5b0-421f-bd28-6c46f3d99af3" (UID: "f58a4cdc-b5b0-421f-bd28-6c46f3d99af3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.790428 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f58a4cdc-b5b0-421f-bd28-6c46f3d99af3" (UID: "f58a4cdc-b5b0-421f-bd28-6c46f3d99af3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.855175 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.855205 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.855215 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4xx5\" (UniqueName: \"kubernetes.io/projected/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-kube-api-access-d4xx5\") on node \"crc\" DevicePath \"\"" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.855226 4813 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f58a4cdc-b5b0-421f-bd28-6c46f3d99af3-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.921162 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:41 crc kubenswrapper[4813]: I1007 19:43:41.987498 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.160934 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsdp7"] Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.184808 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" event={"ID":"f58a4cdc-b5b0-421f-bd28-6c46f3d99af3","Type":"ContainerDied","Data":"42139f0113a681019456bd7d22ef6b17b67cc06205fe6d83ce332526fd9f37f7"} Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.184846 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42139f0113a681019456bd7d22ef6b17b67cc06205fe6d83ce332526fd9f37f7" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.184911 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.305377 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn"] Oct 07 19:43:42 crc kubenswrapper[4813]: E1007 19:43:42.306151 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f58a4cdc-b5b0-421f-bd28-6c46f3d99af3" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.306169 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f58a4cdc-b5b0-421f-bd28-6c46f3d99af3" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.306412 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f58a4cdc-b5b0-421f-bd28-6c46f3d99af3" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.307176 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.313348 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.315729 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.316306 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.329614 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.336594 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn"] Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.465434 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh7mn\" (UniqueName: \"kubernetes.io/projected/4713cec2-7e5d-4d1b-8436-1cd44794b936-kube-api-access-zh7mn\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.465524 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.465643 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.567803 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh7mn\" (UniqueName: \"kubernetes.io/projected/4713cec2-7e5d-4d1b-8436-1cd44794b936-kube-api-access-zh7mn\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.567941 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.568089 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.587877 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.589197 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.591061 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh7mn\" (UniqueName: \"kubernetes.io/projected/4713cec2-7e5d-4d1b-8436-1cd44794b936-kube-api-access-zh7mn\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.615579 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2a590f8-2fe3-48e8-b1fa-599605162117" path="/var/lib/kubelet/pods/c2a590f8-2fe3-48e8-b1fa-599605162117/volumes" Oct 07 19:43:42 crc kubenswrapper[4813]: I1007 19:43:42.641129 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.043075 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-2f75-account-create-4bvsq"] Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.055242 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-2f75-account-create-4bvsq"] Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.193593 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hsdp7" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="registry-server" containerID="cri-o://53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8" gracePeriod=2 Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.199741 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn"] Oct 07 19:43:43 crc kubenswrapper[4813]: W1007 19:43:43.208387 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4713cec2_7e5d_4d1b_8436_1cd44794b936.slice/crio-c5444bfd13b056cf211de6ed352bb023485e1ba0f83893ef36827d7746af4dc2 WatchSource:0}: Error finding container c5444bfd13b056cf211de6ed352bb023485e1ba0f83893ef36827d7746af4dc2: Status 404 returned error can't find the container with id c5444bfd13b056cf211de6ed352bb023485e1ba0f83893ef36827d7746af4dc2 Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.537711 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.689826 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-utilities\") pod \"14c5bcae-9598-411c-8a15-56302ff3bb35\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.689939 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-catalog-content\") pod \"14c5bcae-9598-411c-8a15-56302ff3bb35\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.689990 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vsl7\" (UniqueName: \"kubernetes.io/projected/14c5bcae-9598-411c-8a15-56302ff3bb35-kube-api-access-8vsl7\") pod \"14c5bcae-9598-411c-8a15-56302ff3bb35\" (UID: \"14c5bcae-9598-411c-8a15-56302ff3bb35\") " Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.696696 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14c5bcae-9598-411c-8a15-56302ff3bb35-kube-api-access-8vsl7" (OuterVolumeSpecName: "kube-api-access-8vsl7") pod "14c5bcae-9598-411c-8a15-56302ff3bb35" (UID: "14c5bcae-9598-411c-8a15-56302ff3bb35"). InnerVolumeSpecName "kube-api-access-8vsl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.698929 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-utilities" (OuterVolumeSpecName: "utilities") pod "14c5bcae-9598-411c-8a15-56302ff3bb35" (UID: "14c5bcae-9598-411c-8a15-56302ff3bb35"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.735565 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14c5bcae-9598-411c-8a15-56302ff3bb35" (UID: "14c5bcae-9598-411c-8a15-56302ff3bb35"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.792411 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.792446 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vsl7\" (UniqueName: \"kubernetes.io/projected/14c5bcae-9598-411c-8a15-56302ff3bb35-kube-api-access-8vsl7\") on node \"crc\" DevicePath \"\"" Oct 07 19:43:43 crc kubenswrapper[4813]: I1007 19:43:43.792460 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c5bcae-9598-411c-8a15-56302ff3bb35-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.206552 4813 generic.go:334] "Generic (PLEG): container finished" podID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerID="53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8" exitCode=0 Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.206572 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsdp7" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.206599 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdp7" event={"ID":"14c5bcae-9598-411c-8a15-56302ff3bb35","Type":"ContainerDied","Data":"53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8"} Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.207266 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsdp7" event={"ID":"14c5bcae-9598-411c-8a15-56302ff3bb35","Type":"ContainerDied","Data":"86f1a50d1da42b7542ee0777026ffb785bc5a633fd229a74a289be76649bb0ff"} Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.207397 4813 scope.go:117] "RemoveContainer" containerID="53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.209615 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" event={"ID":"4713cec2-7e5d-4d1b-8436-1cd44794b936","Type":"ContainerStarted","Data":"3b64ea0161ddaba16fb267494065ae7646597df502d60964df18509e180bcd4d"} Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.209640 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" event={"ID":"4713cec2-7e5d-4d1b-8436-1cd44794b936","Type":"ContainerStarted","Data":"c5444bfd13b056cf211de6ed352bb023485e1ba0f83893ef36827d7746af4dc2"} Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.236225 4813 scope.go:117] "RemoveContainer" containerID="2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.259562 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" podStartSLOduration=1.857279022 podStartE2EDuration="2.259536275s" podCreationTimestamp="2025-10-07 19:43:42 +0000 UTC" firstStartedPulling="2025-10-07 19:43:43.210129577 +0000 UTC m=+1549.288385188" lastFinishedPulling="2025-10-07 19:43:43.61238683 +0000 UTC m=+1549.690642441" observedRunningTime="2025-10-07 19:43:44.230716175 +0000 
UTC m=+1550.308971796" watchObservedRunningTime="2025-10-07 19:43:44.259536275 +0000 UTC m=+1550.337791896" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.286744 4813 scope.go:117] "RemoveContainer" containerID="94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.287099 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsdp7"] Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.293242 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hsdp7"] Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.319998 4813 scope.go:117] "RemoveContainer" containerID="53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8" Oct 07 19:43:44 crc kubenswrapper[4813]: E1007 19:43:44.320595 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8\": container with ID starting with 53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8 not found: ID does not exist" containerID="53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.320658 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8"} err="failed to get container status \"53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8\": rpc error: code = NotFound desc = could not find container \"53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8\": container with ID starting with 53cdb9d1c82cddcd8ec08e97bc4a004d4050cf39151b0b6033d06e46730c35c8 not found: ID does not exist" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.320687 4813 scope.go:117] "RemoveContainer" containerID="2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1" Oct 07 19:43:44 crc kubenswrapper[4813]: E1007 19:43:44.321148 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1\": container with ID starting with 2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1 not found: ID does not exist" containerID="2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.321178 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1"} err="failed to get container status \"2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1\": rpc error: code = NotFound desc = could not find container \"2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1\": container with ID starting with 2afdbb09cb2eb3b36b674e169f026250fa58534739d9382d70599e35af8d98a1 not found: ID does not exist" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.321197 4813 scope.go:117] "RemoveContainer" containerID="94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999" Oct 07 19:43:44 crc kubenswrapper[4813]: E1007 19:43:44.321559 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999\": container 
with ID starting with 94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999 not found: ID does not exist" containerID="94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.321617 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999"} err="failed to get container status \"94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999\": rpc error: code = NotFound desc = could not find container \"94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999\": container with ID starting with 94f37d5bc8d341c70034ae39bcf4ca2b3afbbc84d145901c50010a7d62fdd999 not found: ID does not exist" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.618900 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b0553df-a455-41ec-938f-ef494964014d" path="/var/lib/kubelet/pods/0b0553df-a455-41ec-938f-ef494964014d/volumes" Oct 07 19:43:44 crc kubenswrapper[4813]: I1007 19:43:44.619485 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" path="/var/lib/kubelet/pods/14c5bcae-9598-411c-8a15-56302ff3bb35/volumes" Oct 07 19:43:50 crc kubenswrapper[4813]: I1007 19:43:50.293868 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wf8jb" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="registry-server" probeResult="failure" output=< Oct 07 19:43:50 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:43:50 crc kubenswrapper[4813]: > Oct 07 19:43:50 crc kubenswrapper[4813]: I1007 19:43:50.602742 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:43:50 crc kubenswrapper[4813]: E1007 19:43:50.603005 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:43:51 crc kubenswrapper[4813]: I1007 19:43:51.042750 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-ab07-account-create-llmcr"] Oct 07 19:43:51 crc kubenswrapper[4813]: I1007 19:43:51.062556 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-ab07-account-create-llmcr"] Oct 07 19:43:52 crc kubenswrapper[4813]: I1007 19:43:52.624734 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af27a525-e196-45c9-bf20-7bce89dfae1f" path="/var/lib/kubelet/pods/af27a525-e196-45c9-bf20-7bce89dfae1f/volumes" Oct 07 19:43:59 crc kubenswrapper[4813]: I1007 19:43:59.322638 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:59 crc kubenswrapper[4813]: I1007 19:43:59.405077 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:43:59 crc kubenswrapper[4813]: I1007 19:43:59.591792 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wf8jb"] Oct 07 19:44:00 crc kubenswrapper[4813]: 
I1007 19:44:00.036409 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5cae-account-create-qzhks"] Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.048706 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-5cae-account-create-qzhks"] Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.391543 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wf8jb" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="registry-server" containerID="cri-o://d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852" gracePeriod=2 Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.620018 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efc337fb-89a0-4837-9137-ea0d3b8f51eb" path="/var/lib/kubelet/pods/efc337fb-89a0-4837-9137-ea0d3b8f51eb/volumes" Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.839709 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.951178 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-catalog-content\") pod \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.951688 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh9kd\" (UniqueName: \"kubernetes.io/projected/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-kube-api-access-bh9kd\") pod \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.951754 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-utilities\") pod \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\" (UID: \"4d14a760-1e5c-4ee2-8a42-5fe44050a52a\") " Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.953150 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-utilities" (OuterVolumeSpecName: "utilities") pod "4d14a760-1e5c-4ee2-8a42-5fe44050a52a" (UID: "4d14a760-1e5c-4ee2-8a42-5fe44050a52a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:44:00 crc kubenswrapper[4813]: I1007 19:44:00.965592 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-kube-api-access-bh9kd" (OuterVolumeSpecName: "kube-api-access-bh9kd") pod "4d14a760-1e5c-4ee2-8a42-5fe44050a52a" (UID: "4d14a760-1e5c-4ee2-8a42-5fe44050a52a"). InnerVolumeSpecName "kube-api-access-bh9kd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.025304 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d14a760-1e5c-4ee2-8a42-5fe44050a52a" (UID: "4d14a760-1e5c-4ee2-8a42-5fe44050a52a"). InnerVolumeSpecName "catalog-content". 
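
[Annotation] Each RemoveContainer / "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" triple in this log is benign: the kubelet re-attempts deletion of a container CRI-O has already removed and treats the gRPC NotFound status as already-gone. A sketch of that idempotent-delete pattern; deleteContainer is a hypothetical stand-in for the CRI call, and the example assumes google.golang.org/grpc is available.

package main

import (
    "fmt"

    "google.golang.org/grpc/codes"
    "google.golang.org/grpc/status"
)

// removeIfPresent retries a container deletion but treats the
// runtime's NotFound as success, so the second attempt seen in the
// log above is a no-op rather than a real failure.
func removeIfPresent(id string, deleteContainer func(string) error) error {
    err := deleteContainer(id)
    if status.Code(err) == codes.NotFound {
        return nil // already removed by the runtime
    }
    return err
}

func main() {
    gone := func(id string) error {
        return status.Error(codes.NotFound, "could not find container "+id)
    }
    fmt.Println(removeIfPresent("53cdb9d1c82c", gone)) // <nil>
}
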
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.053860 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh9kd\" (UniqueName: \"kubernetes.io/projected/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-kube-api-access-bh9kd\") on node \"crc\" DevicePath \"\"" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.053891 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.053902 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d14a760-1e5c-4ee2-8a42-5fe44050a52a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.402133 4813 generic.go:334] "Generic (PLEG): container finished" podID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerID="d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852" exitCode=0 Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.402184 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wf8jb" event={"ID":"4d14a760-1e5c-4ee2-8a42-5fe44050a52a","Type":"ContainerDied","Data":"d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852"} Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.402217 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wf8jb" event={"ID":"4d14a760-1e5c-4ee2-8a42-5fe44050a52a","Type":"ContainerDied","Data":"6238f071fef1ecc487bb1b4d2711732a579115bc02988c64c447068ada9fcca6"} Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.402236 4813 scope.go:117] "RemoveContainer" containerID="d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.402414 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wf8jb" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.443917 4813 scope.go:117] "RemoveContainer" containerID="7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.446423 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wf8jb"] Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.456245 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wf8jb"] Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.474552 4813 scope.go:117] "RemoveContainer" containerID="8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.515558 4813 scope.go:117] "RemoveContainer" containerID="d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852" Oct 07 19:44:01 crc kubenswrapper[4813]: E1007 19:44:01.516122 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852\": container with ID starting with d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852 not found: ID does not exist" containerID="d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.516175 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852"} err="failed to get container status \"d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852\": rpc error: code = NotFound desc = could not find container \"d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852\": container with ID starting with d7294a4f533748d536f2f54804183d2765cee295320ec646447dd15c57b60852 not found: ID does not exist" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.516249 4813 scope.go:117] "RemoveContainer" containerID="7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e" Oct 07 19:44:01 crc kubenswrapper[4813]: E1007 19:44:01.516968 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e\": container with ID starting with 7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e not found: ID does not exist" containerID="7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.517006 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e"} err="failed to get container status \"7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e\": rpc error: code = NotFound desc = could not find container \"7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e\": container with ID starting with 7c11f3965e39f1ba0a0fa4d946af76c3e6a0dbe4b1f08708838118ca07cd482e not found: ID does not exist" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.517039 4813 scope.go:117] "RemoveContainer" containerID="8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd" Oct 07 19:44:01 crc kubenswrapper[4813]: E1007 19:44:01.517455 4813 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd\": container with ID starting with 8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd not found: ID does not exist" containerID="8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.517524 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd"} err="failed to get container status \"8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd\": rpc error: code = NotFound desc = could not find container \"8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd\": container with ID starting with 8cd6b282e7b65874df591858704ba2670cddff06cc5f03292d07f5053a1310fd not found: ID does not exist" Oct 07 19:44:01 crc kubenswrapper[4813]: I1007 19:44:01.608400 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:44:01 crc kubenswrapper[4813]: E1007 19:44:01.608755 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:44:02 crc kubenswrapper[4813]: I1007 19:44:02.615757 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" path="/var/lib/kubelet/pods/4d14a760-1e5c-4ee2-8a42-5fe44050a52a/volumes" Oct 07 19:44:07 crc kubenswrapper[4813]: I1007 19:44:07.428304 4813 scope.go:117] "RemoveContainer" containerID="249d5e7b1d54c523dc81d83bc28d0c3bfb98d89a38be4cc8ba5ab6a99134d85f" Oct 07 19:44:07 crc kubenswrapper[4813]: I1007 19:44:07.457168 4813 scope.go:117] "RemoveContainer" containerID="0d523f0ed776f2b58b3cb228e461ae3998a527f908c88af624ff44b83e336abd" Oct 07 19:44:07 crc kubenswrapper[4813]: I1007 19:44:07.519193 4813 scope.go:117] "RemoveContainer" containerID="aff9db31e0ffc2a87411c8853c73b0c6761072f763bc58434ca7b205064d4c9b" Oct 07 19:44:07 crc kubenswrapper[4813]: I1007 19:44:07.571466 4813 scope.go:117] "RemoveContainer" containerID="e2fd8969c23a6a6c970eb66c08dbb3d91dca2794dfbd416f28ca46e2c2b4e8f4" Oct 07 19:44:07 crc kubenswrapper[4813]: I1007 19:44:07.612296 4813 scope.go:117] "RemoveContainer" containerID="49ed646c9286297ac09320a65314843f97bcf1c7f93927c3b7ee8239062c5102" Oct 07 19:44:07 crc kubenswrapper[4813]: I1007 19:44:07.634198 4813 scope.go:117] "RemoveContainer" containerID="20f0a158a7b5377e8de387bfcd55424edf328ca16bbc7ce7ead0c523b3c703fc" Oct 07 19:44:07 crc kubenswrapper[4813]: I1007 19:44:07.676021 4813 scope.go:117] "RemoveContainer" containerID="6d784c2d4053e44e8fbd3d6387c9158261367aed26d93538a11d46a80e648de7" Oct 07 19:44:08 crc kubenswrapper[4813]: I1007 19:44:08.037592 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-6x4qp"] Oct 07 19:44:08 crc kubenswrapper[4813]: I1007 19:44:08.046223 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-6x4qp"] Oct 07 19:44:08 crc kubenswrapper[4813]: I1007 19:44:08.621201 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="9d72dd22-ed08-4510-9c62-d01807e11064" path="/var/lib/kubelet/pods/9d72dd22-ed08-4510-9c62-d01807e11064/volumes" Oct 07 19:44:13 crc kubenswrapper[4813]: I1007 19:44:13.603209 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:44:13 crc kubenswrapper[4813]: E1007 19:44:13.603902 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.051955 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-2bcbb"] Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.062943 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-886kw"] Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.105731 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-275zb"] Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.114568 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-886kw"] Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.127482 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-2bcbb"] Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.137038 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-275zb"] Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.616257 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d" path="/var/lib/kubelet/pods/0347a0a9-e25f-4ee5-9fb7-f46a2ff6ea0d/volumes" Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.617420 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dbca003-4721-4826-ba32-c996b89f1068" path="/var/lib/kubelet/pods/7dbca003-4721-4826-ba32-c996b89f1068/volumes" Oct 07 19:44:20 crc kubenswrapper[4813]: I1007 19:44:20.619056 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d68c3899-a167-4624-96fa-129664c55bff" path="/var/lib/kubelet/pods/d68c3899-a167-4624-96fa-129664c55bff/volumes" Oct 07 19:44:28 crc kubenswrapper[4813]: I1007 19:44:28.603526 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:44:28 crc kubenswrapper[4813]: E1007 19:44:28.605606 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:44:29 crc kubenswrapper[4813]: I1007 19:44:29.031705 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-xbcqc"] Oct 07 19:44:29 crc kubenswrapper[4813]: I1007 19:44:29.038535 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-xbcqc"] Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.062271 4813 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-28a4-account-create-5qhdj"] Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.080110 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-a752-account-create-zm29m"] Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.090433 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-3bbe-account-create-gjbjp"] Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.097536 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-a752-account-create-zm29m"] Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.106023 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-28a4-account-create-5qhdj"] Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.112799 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-3bbe-account-create-gjbjp"] Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.624175 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54066ef9-3488-4b4c-a9ba-ba8a6cf48df2" path="/var/lib/kubelet/pods/54066ef9-3488-4b4c-a9ba-ba8a6cf48df2/volumes" Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.625773 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9fc7274-f1a7-436c-bd91-da50a3c3607b" path="/var/lib/kubelet/pods/a9fc7274-f1a7-436c-bd91-da50a3c3607b/volumes" Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.626878 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aee38026-115c-4782-bdde-eadd7cd26d62" path="/var/lib/kubelet/pods/aee38026-115c-4782-bdde-eadd7cd26d62/volumes" Oct 07 19:44:30 crc kubenswrapper[4813]: I1007 19:44:30.628106 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b61b5e30-f563-4f0c-9578-4953d831ffb9" path="/var/lib/kubelet/pods/b61b5e30-f563-4f0c-9578-4953d831ffb9/volumes" Oct 07 19:44:43 crc kubenswrapper[4813]: I1007 19:44:43.604232 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:44:43 crc kubenswrapper[4813]: E1007 19:44:43.605262 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:44:58 crc kubenswrapper[4813]: I1007 19:44:58.602966 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:44:58 crc kubenswrapper[4813]: E1007 19:44:58.603663 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.193251 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz"] Oct 07 19:45:00 crc kubenswrapper[4813]: E1007 19:45:00.193871 4813 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="extract-utilities" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.193897 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="extract-utilities" Oct 07 19:45:00 crc kubenswrapper[4813]: E1007 19:45:00.193920 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="registry-server" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.193930 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="registry-server" Oct 07 19:45:00 crc kubenswrapper[4813]: E1007 19:45:00.193946 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="extract-content" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.193955 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="extract-content" Oct 07 19:45:00 crc kubenswrapper[4813]: E1007 19:45:00.193983 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="extract-content" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.193994 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="extract-content" Oct 07 19:45:00 crc kubenswrapper[4813]: E1007 19:45:00.194013 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="registry-server" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.194023 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="registry-server" Oct 07 19:45:00 crc kubenswrapper[4813]: E1007 19:45:00.194072 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="extract-utilities" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.194086 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="extract-utilities" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.194403 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="14c5bcae-9598-411c-8a15-56302ff3bb35" containerName="registry-server" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.194458 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d14a760-1e5c-4ee2-8a42-5fe44050a52a" containerName="registry-server" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.195393 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.199512 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.199872 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.205839 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz"] Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.271554 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22811987-5993-47b8-88ab-f0665c950567-secret-volume\") pod \"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.272059 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22811987-5993-47b8-88ab-f0665c950567-config-volume\") pod \"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.272263 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xljt8\" (UniqueName: \"kubernetes.io/projected/22811987-5993-47b8-88ab-f0665c950567-kube-api-access-xljt8\") pod \"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.374095 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22811987-5993-47b8-88ab-f0665c950567-secret-volume\") pod \"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.374560 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22811987-5993-47b8-88ab-f0665c950567-config-volume\") pod \"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.374862 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xljt8\" (UniqueName: \"kubernetes.io/projected/22811987-5993-47b8-88ab-f0665c950567-kube-api-access-xljt8\") pod \"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.375351 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22811987-5993-47b8-88ab-f0665c950567-config-volume\") pod 
\"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.385105 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22811987-5993-47b8-88ab-f0665c950567-secret-volume\") pod \"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.393157 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xljt8\" (UniqueName: \"kubernetes.io/projected/22811987-5993-47b8-88ab-f0665c950567-kube-api-access-xljt8\") pod \"collect-profiles-29331105-lzfcz\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:00 crc kubenswrapper[4813]: I1007 19:45:00.538692 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:01 crc kubenswrapper[4813]: I1007 19:45:01.052565 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz"] Oct 07 19:45:01 crc kubenswrapper[4813]: W1007 19:45:01.054687 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22811987_5993_47b8_88ab_f0665c950567.slice/crio-6457c6d3f9a24683ad78611035a139e4992e45939067d251d2362f777fb72dc0 WatchSource:0}: Error finding container 6457c6d3f9a24683ad78611035a139e4992e45939067d251d2362f777fb72dc0: Status 404 returned error can't find the container with id 6457c6d3f9a24683ad78611035a139e4992e45939067d251d2362f777fb72dc0 Oct 07 19:45:01 crc kubenswrapper[4813]: I1007 19:45:01.103238 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" event={"ID":"22811987-5993-47b8-88ab-f0665c950567","Type":"ContainerStarted","Data":"6457c6d3f9a24683ad78611035a139e4992e45939067d251d2362f777fb72dc0"} Oct 07 19:45:02 crc kubenswrapper[4813]: I1007 19:45:02.116210 4813 generic.go:334] "Generic (PLEG): container finished" podID="22811987-5993-47b8-88ab-f0665c950567" containerID="9b102e0d5827a20c6f9656818feef83638d1d75ba424efc1dad522e5d5ba2f3b" exitCode=0 Oct 07 19:45:02 crc kubenswrapper[4813]: I1007 19:45:02.116283 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" event={"ID":"22811987-5993-47b8-88ab-f0665c950567","Type":"ContainerDied","Data":"9b102e0d5827a20c6f9656818feef83638d1d75ba424efc1dad522e5d5ba2f3b"} Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.540275 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.640611 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22811987-5993-47b8-88ab-f0665c950567-secret-volume\") pod \"22811987-5993-47b8-88ab-f0665c950567\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.640658 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22811987-5993-47b8-88ab-f0665c950567-config-volume\") pod \"22811987-5993-47b8-88ab-f0665c950567\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.640886 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xljt8\" (UniqueName: \"kubernetes.io/projected/22811987-5993-47b8-88ab-f0665c950567-kube-api-access-xljt8\") pod \"22811987-5993-47b8-88ab-f0665c950567\" (UID: \"22811987-5993-47b8-88ab-f0665c950567\") " Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.642004 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22811987-5993-47b8-88ab-f0665c950567-config-volume" (OuterVolumeSpecName: "config-volume") pod "22811987-5993-47b8-88ab-f0665c950567" (UID: "22811987-5993-47b8-88ab-f0665c950567"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.651994 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22811987-5993-47b8-88ab-f0665c950567-kube-api-access-xljt8" (OuterVolumeSpecName: "kube-api-access-xljt8") pod "22811987-5993-47b8-88ab-f0665c950567" (UID: "22811987-5993-47b8-88ab-f0665c950567"). InnerVolumeSpecName "kube-api-access-xljt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.653096 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22811987-5993-47b8-88ab-f0665c950567-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "22811987-5993-47b8-88ab-f0665c950567" (UID: "22811987-5993-47b8-88ab-f0665c950567"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.742615 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xljt8\" (UniqueName: \"kubernetes.io/projected/22811987-5993-47b8-88ab-f0665c950567-kube-api-access-xljt8\") on node \"crc\" DevicePath \"\"" Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.742656 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/22811987-5993-47b8-88ab-f0665c950567-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 19:45:03 crc kubenswrapper[4813]: I1007 19:45:03.742671 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/22811987-5993-47b8-88ab-f0665c950567-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 19:45:04 crc kubenswrapper[4813]: I1007 19:45:04.135629 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" event={"ID":"22811987-5993-47b8-88ab-f0665c950567","Type":"ContainerDied","Data":"6457c6d3f9a24683ad78611035a139e4992e45939067d251d2362f777fb72dc0"} Oct 07 19:45:04 crc kubenswrapper[4813]: I1007 19:45:04.135690 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6457c6d3f9a24683ad78611035a139e4992e45939067d251d2362f777fb72dc0" Oct 07 19:45:04 crc kubenswrapper[4813]: I1007 19:45:04.135774 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz" Oct 07 19:45:07 crc kubenswrapper[4813]: I1007 19:45:07.905675 4813 scope.go:117] "RemoveContainer" containerID="954463ed877a1da5bde94d44c8aa23324d076ea764cb58ad0114403ad460ad7d" Oct 07 19:45:07 crc kubenswrapper[4813]: I1007 19:45:07.965652 4813 scope.go:117] "RemoveContainer" containerID="284db407c3d4776dc2ba72d3a97c56fd5f9d71c83b971aca9750e3b2b311584d" Oct 07 19:45:07 crc kubenswrapper[4813]: I1007 19:45:07.996791 4813 scope.go:117] "RemoveContainer" containerID="c5aae9725308edf30ac4733ae22ac760492ef832926c6c7c718205bafbf94358" Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.041270 4813 scope.go:117] "RemoveContainer" containerID="05f60abc9a3a724c847e8a650c7d96e9048f65ad5daf7ea283f074d8432c8a84" Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.063651 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-wc2qf"] Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.070458 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-wc2qf"] Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.089298 4813 scope.go:117] "RemoveContainer" containerID="e7adc0732bbee54074cab8a3e1fdf7a171500b6e73adc26878b4884927e1671e" Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.116884 4813 scope.go:117] "RemoveContainer" containerID="865a67b26bb514265558f7b9e701dc9cf65ab90d990f6b2349976b6713edc064" Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.151421 4813 scope.go:117] "RemoveContainer" containerID="8edb361aa611b6752b45925da9abf8aaff9d6dc46cd831bedf4f48d2029eb028" Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.202761 4813 scope.go:117] "RemoveContainer" containerID="6250123bab676b30ece4fdb536cd6d13ef1f0f3f511a7259fb7fdd1fc7cdf3d8" Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.219069 4813 scope.go:117] "RemoveContainer" 
containerID="f9cf75cc060d2be879a21134417d2227dca3b23828d8148054a09752fc97d512" Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.248860 4813 scope.go:117] "RemoveContainer" containerID="3bb9be13cd675da54f97286f00f03d0b025d6dd8a865eaa1b9553b9db71dc7fe" Oct 07 19:45:08 crc kubenswrapper[4813]: I1007 19:45:08.615216 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68a76fc8-778e-4878-b798-8c21827833b4" path="/var/lib/kubelet/pods/68a76fc8-778e-4878-b798-8c21827833b4/volumes" Oct 07 19:45:13 crc kubenswrapper[4813]: I1007 19:45:13.603063 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:45:13 crc kubenswrapper[4813]: E1007 19:45:13.603921 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:45:16 crc kubenswrapper[4813]: I1007 19:45:16.048043 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-58vdp"] Oct 07 19:45:16 crc kubenswrapper[4813]: I1007 19:45:16.058365 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-58vdp"] Oct 07 19:45:16 crc kubenswrapper[4813]: I1007 19:45:16.610972 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f039eca-b53a-446b-b219-2b6f2d56a0b4" path="/var/lib/kubelet/pods/8f039eca-b53a-446b-b219-2b6f2d56a0b4/volumes" Oct 07 19:45:21 crc kubenswrapper[4813]: I1007 19:45:21.068540 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-zz7dp"] Oct 07 19:45:21 crc kubenswrapper[4813]: I1007 19:45:21.090202 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-zz7dp"] Oct 07 19:45:22 crc kubenswrapper[4813]: I1007 19:45:22.618880 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0afdc9e8-75a0-4ee8-971b-f2390ea1eff4" path="/var/lib/kubelet/pods/0afdc9e8-75a0-4ee8-971b-f2390ea1eff4/volumes" Oct 07 19:45:24 crc kubenswrapper[4813]: I1007 19:45:24.618284 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:45:24 crc kubenswrapper[4813]: E1007 19:45:24.619189 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:45:34 crc kubenswrapper[4813]: I1007 19:45:34.079568 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-7b94g"] Oct 07 19:45:34 crc kubenswrapper[4813]: I1007 19:45:34.093688 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-7b94g"] Oct 07 19:45:34 crc kubenswrapper[4813]: I1007 19:45:34.622247 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7aa4a319-f846-4044-a663-c75e35168316" 
path="/var/lib/kubelet/pods/7aa4a319-f846-4044-a663-c75e35168316/volumes" Oct 07 19:45:35 crc kubenswrapper[4813]: I1007 19:45:35.602697 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:45:35 crc kubenswrapper[4813]: E1007 19:45:35.603170 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:45:40 crc kubenswrapper[4813]: I1007 19:45:40.042645 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-95b57"] Oct 07 19:45:40 crc kubenswrapper[4813]: I1007 19:45:40.053292 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-95b57"] Oct 07 19:45:40 crc kubenswrapper[4813]: I1007 19:45:40.630614 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37f756c5-2123-4e5b-9c02-f33dd061d767" path="/var/lib/kubelet/pods/37f756c5-2123-4e5b-9c02-f33dd061d767/volumes" Oct 07 19:45:43 crc kubenswrapper[4813]: I1007 19:45:43.535278 4813 generic.go:334] "Generic (PLEG): container finished" podID="4713cec2-7e5d-4d1b-8436-1cd44794b936" containerID="3b64ea0161ddaba16fb267494065ae7646597df502d60964df18509e180bcd4d" exitCode=0 Oct 07 19:45:43 crc kubenswrapper[4813]: I1007 19:45:43.535404 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" event={"ID":"4713cec2-7e5d-4d1b-8436-1cd44794b936","Type":"ContainerDied","Data":"3b64ea0161ddaba16fb267494065ae7646597df502d60964df18509e180bcd4d"} Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.039465 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.209114 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-ssh-key\") pod \"4713cec2-7e5d-4d1b-8436-1cd44794b936\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.209545 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-inventory\") pod \"4713cec2-7e5d-4d1b-8436-1cd44794b936\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.209603 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zh7mn\" (UniqueName: \"kubernetes.io/projected/4713cec2-7e5d-4d1b-8436-1cd44794b936-kube-api-access-zh7mn\") pod \"4713cec2-7e5d-4d1b-8436-1cd44794b936\" (UID: \"4713cec2-7e5d-4d1b-8436-1cd44794b936\") " Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.224407 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4713cec2-7e5d-4d1b-8436-1cd44794b936-kube-api-access-zh7mn" (OuterVolumeSpecName: "kube-api-access-zh7mn") pod "4713cec2-7e5d-4d1b-8436-1cd44794b936" (UID: "4713cec2-7e5d-4d1b-8436-1cd44794b936"). 
InnerVolumeSpecName "kube-api-access-zh7mn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.235528 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-inventory" (OuterVolumeSpecName: "inventory") pod "4713cec2-7e5d-4d1b-8436-1cd44794b936" (UID: "4713cec2-7e5d-4d1b-8436-1cd44794b936"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.242598 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4713cec2-7e5d-4d1b-8436-1cd44794b936" (UID: "4713cec2-7e5d-4d1b-8436-1cd44794b936"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.311840 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.311869 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4713cec2-7e5d-4d1b-8436-1cd44794b936-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.311880 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zh7mn\" (UniqueName: \"kubernetes.io/projected/4713cec2-7e5d-4d1b-8436-1cd44794b936-kube-api-access-zh7mn\") on node \"crc\" DevicePath \"\"" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.568402 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.572410 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn" event={"ID":"4713cec2-7e5d-4d1b-8436-1cd44794b936","Type":"ContainerDied","Data":"c5444bfd13b056cf211de6ed352bb023485e1ba0f83893ef36827d7746af4dc2"} Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.572462 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5444bfd13b056cf211de6ed352bb023485e1ba0f83893ef36827d7746af4dc2" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.718301 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv"] Oct 07 19:45:45 crc kubenswrapper[4813]: E1007 19:45:45.718811 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22811987-5993-47b8-88ab-f0665c950567" containerName="collect-profiles" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.718833 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="22811987-5993-47b8-88ab-f0665c950567" containerName="collect-profiles" Oct 07 19:45:45 crc kubenswrapper[4813]: E1007 19:45:45.718886 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4713cec2-7e5d-4d1b-8436-1cd44794b936" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.718897 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4713cec2-7e5d-4d1b-8436-1cd44794b936" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.719115 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="22811987-5993-47b8-88ab-f0665c950567" containerName="collect-profiles" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.719140 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4713cec2-7e5d-4d1b-8436-1cd44794b936" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.727792 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv"] Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.727926 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.731039 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.731422 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.731501 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.731680 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.822807 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.822893 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.823250 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qlnl\" (UniqueName: \"kubernetes.io/projected/a6b4cff6-9f92-484a-a556-d7b95dcf455f-kube-api-access-4qlnl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: E1007 19:45:45.862634 4813 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4713cec2_7e5d_4d1b_8436_1cd44794b936.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4713cec2_7e5d_4d1b_8436_1cd44794b936.slice/crio-c5444bfd13b056cf211de6ed352bb023485e1ba0f83893ef36827d7746af4dc2\": RecentStats: unable to find data in memory cache]" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.925132 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qlnl\" (UniqueName: \"kubernetes.io/projected/a6b4cff6-9f92-484a-a556-d7b95dcf455f-kube-api-access-4qlnl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.925235 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-inventory\") pod 
\"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.925268 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.930593 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.946201 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qlnl\" (UniqueName: \"kubernetes.io/projected/a6b4cff6-9f92-484a-a556-d7b95dcf455f-kube-api-access-4qlnl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:45 crc kubenswrapper[4813]: I1007 19:45:45.963962 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6whsv\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:46 crc kubenswrapper[4813]: I1007 19:45:46.047655 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:45:46 crc kubenswrapper[4813]: I1007 19:45:46.681503 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv"] Oct 07 19:45:47 crc kubenswrapper[4813]: I1007 19:45:47.588782 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" event={"ID":"a6b4cff6-9f92-484a-a556-d7b95dcf455f","Type":"ContainerStarted","Data":"a0e2aaf65a6c2ba268bc055ca1c5a3b6c8bf1d8a2c2746374fda6f09371a8834"} Oct 07 19:45:47 crc kubenswrapper[4813]: I1007 19:45:47.589077 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" event={"ID":"a6b4cff6-9f92-484a-a556-d7b95dcf455f","Type":"ContainerStarted","Data":"3f1c6b32b2416fda394c3805ee483e018f28c012f5583abaae6f4cb1952d6502"} Oct 07 19:45:47 crc kubenswrapper[4813]: I1007 19:45:47.608133 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" podStartSLOduration=2.138112071 podStartE2EDuration="2.608115931s" podCreationTimestamp="2025-10-07 19:45:45 +0000 UTC" firstStartedPulling="2025-10-07 19:45:46.666429155 +0000 UTC m=+1672.744684766" lastFinishedPulling="2025-10-07 19:45:47.136433005 +0000 UTC m=+1673.214688626" observedRunningTime="2025-10-07 19:45:47.604056697 +0000 UTC m=+1673.682312308" watchObservedRunningTime="2025-10-07 19:45:47.608115931 +0000 UTC m=+1673.686371542" Oct 07 19:45:50 crc kubenswrapper[4813]: I1007 19:45:50.603100 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:45:50 crc kubenswrapper[4813]: E1007 19:45:50.604012 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:46:04 crc kubenswrapper[4813]: I1007 19:46:04.610596 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:46:04 crc kubenswrapper[4813]: E1007 19:46:04.611869 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:46:08 crc kubenswrapper[4813]: I1007 19:46:08.415107 4813 scope.go:117] "RemoveContainer" containerID="143c3cee153b0f5795746ee4a66869d82db3cc6a0f7cb52fe7e2148858195334" Oct 07 19:46:08 crc kubenswrapper[4813]: I1007 19:46:08.466554 4813 scope.go:117] "RemoveContainer" containerID="d691b8fa255cb3b06af0791bdd2b36ababd7a25b7e0e52f6fd0d37720df16813" Oct 07 19:46:08 crc kubenswrapper[4813]: I1007 19:46:08.505644 4813 scope.go:117] "RemoveContainer" containerID="0c3a5d4f667b23b850823ffa9053ac0bbbcc99f2832056baca7c080460401a8a" Oct 07 19:46:08 crc 
kubenswrapper[4813]: I1007 19:46:08.558304 4813 scope.go:117] "RemoveContainer" containerID="42c2e9110de38426cafde0757204fc89c4f9e9f0719a2595b44eef8eabcbe6c8" Oct 07 19:46:08 crc kubenswrapper[4813]: I1007 19:46:08.598035 4813 scope.go:117] "RemoveContainer" containerID="6ffd161112faed47b3718113f640981370ac7eb1e1280fe19d1a46cadf21215e" Oct 07 19:46:16 crc kubenswrapper[4813]: I1007 19:46:16.603151 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:46:16 crc kubenswrapper[4813]: E1007 19:46:16.604021 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:46:19 crc kubenswrapper[4813]: I1007 19:46:19.050117 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-5qmtk"] Oct 07 19:46:19 crc kubenswrapper[4813]: I1007 19:46:19.065713 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-gwx5z"] Oct 07 19:46:19 crc kubenswrapper[4813]: I1007 19:46:19.075299 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-hln5n"] Oct 07 19:46:19 crc kubenswrapper[4813]: I1007 19:46:19.085405 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-5qmtk"] Oct 07 19:46:19 crc kubenswrapper[4813]: I1007 19:46:19.094739 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-gwx5z"] Oct 07 19:46:19 crc kubenswrapper[4813]: I1007 19:46:19.103242 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-hln5n"] Oct 07 19:46:20 crc kubenswrapper[4813]: I1007 19:46:20.623522 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0969e89-a3e6-4d6d-80f3-381d112f949d" path="/var/lib/kubelet/pods/b0969e89-a3e6-4d6d-80f3-381d112f949d/volumes" Oct 07 19:46:20 crc kubenswrapper[4813]: I1007 19:46:20.626435 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c803f2bd-9bf8-43fa-af9a-3a61bdac0972" path="/var/lib/kubelet/pods/c803f2bd-9bf8-43fa-af9a-3a61bdac0972/volumes" Oct 07 19:46:20 crc kubenswrapper[4813]: I1007 19:46:20.628106 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f60ece8a-8926-46ef-9ae9-126856f4b1b5" path="/var/lib/kubelet/pods/f60ece8a-8926-46ef-9ae9-126856f4b1b5/volumes" Oct 07 19:46:28 crc kubenswrapper[4813]: I1007 19:46:28.603587 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:46:28 crc kubenswrapper[4813]: E1007 19:46:28.604943 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:46:29 crc kubenswrapper[4813]: I1007 19:46:29.029817 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-api-a0b6-account-create-zz2gl"] Oct 07 19:46:29 crc kubenswrapper[4813]: I1007 19:46:29.041197 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-a0b6-account-create-zz2gl"] Oct 07 19:46:30 crc kubenswrapper[4813]: I1007 19:46:30.033207 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-1254-account-create-m5qfr"] Oct 07 19:46:30 crc kubenswrapper[4813]: I1007 19:46:30.050516 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-4d0c-account-create-gkbfl"] Oct 07 19:46:30 crc kubenswrapper[4813]: I1007 19:46:30.062598 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-1254-account-create-m5qfr"] Oct 07 19:46:30 crc kubenswrapper[4813]: I1007 19:46:30.069444 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4d0c-account-create-gkbfl"] Oct 07 19:46:30 crc kubenswrapper[4813]: I1007 19:46:30.617675 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="311a453f-d885-474f-a1e1-2c892940fdb0" path="/var/lib/kubelet/pods/311a453f-d885-474f-a1e1-2c892940fdb0/volumes" Oct 07 19:46:30 crc kubenswrapper[4813]: I1007 19:46:30.618930 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0eed1e1-e367-4fec-a708-c20a5c871719" path="/var/lib/kubelet/pods/c0eed1e1-e367-4fec-a708-c20a5c871719/volumes" Oct 07 19:46:30 crc kubenswrapper[4813]: I1007 19:46:30.619648 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f203d9f1-8903-4490-bb6e-fe1e9d9988e4" path="/var/lib/kubelet/pods/f203d9f1-8903-4490-bb6e-fe1e9d9988e4/volumes" Oct 07 19:46:41 crc kubenswrapper[4813]: I1007 19:46:41.603233 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:46:41 crc kubenswrapper[4813]: E1007 19:46:41.604392 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:46:54 crc kubenswrapper[4813]: I1007 19:46:54.618425 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:46:54 crc kubenswrapper[4813]: E1007 19:46:54.621163 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:46:58 crc kubenswrapper[4813]: I1007 19:46:58.043667 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vr57t"] Oct 07 19:46:58 crc kubenswrapper[4813]: I1007 19:46:58.054837 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vr57t"] Oct 07 19:46:58 crc kubenswrapper[4813]: I1007 19:46:58.614656 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0057db6e-821d-4404-bc89-2a03563c71d2" 
path="/var/lib/kubelet/pods/0057db6e-821d-4404-bc89-2a03563c71d2/volumes" Oct 07 19:47:07 crc kubenswrapper[4813]: I1007 19:47:07.603893 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:47:07 crc kubenswrapper[4813]: E1007 19:47:07.604495 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:47:08 crc kubenswrapper[4813]: I1007 19:47:08.780763 4813 scope.go:117] "RemoveContainer" containerID="e9ff2fa9cd9fd0141162ef0ed37e05fcfcf4074ccf2d15deb75ac532fb5062a7" Oct 07 19:47:08 crc kubenswrapper[4813]: I1007 19:47:08.816391 4813 scope.go:117] "RemoveContainer" containerID="8d82199ccc9b0360971dc8523bcf463552830858fa67f8f03c308c99c72cebbe" Oct 07 19:47:08 crc kubenswrapper[4813]: I1007 19:47:08.894158 4813 scope.go:117] "RemoveContainer" containerID="1a85a2a8d49436c0be6af82fe48355c244a2b168b57de9028eab0ce5a27c71c6" Oct 07 19:47:08 crc kubenswrapper[4813]: I1007 19:47:08.918254 4813 scope.go:117] "RemoveContainer" containerID="c46c699ded93e6218c7c209945fad38f61b4069a65e8ad59eace5ab97176021f" Oct 07 19:47:08 crc kubenswrapper[4813]: I1007 19:47:08.991129 4813 scope.go:117] "RemoveContainer" containerID="62d244cc4322b3e5f4fa64a6748772dabb9b28797e5ef17a7c08291f4321275d" Oct 07 19:47:09 crc kubenswrapper[4813]: I1007 19:47:09.010220 4813 scope.go:117] "RemoveContainer" containerID="32dce849bc269113a34ace501a116e4331ac899be38f14dd1d052795ca870f1a" Oct 07 19:47:09 crc kubenswrapper[4813]: I1007 19:47:09.054138 4813 scope.go:117] "RemoveContainer" containerID="4b1061e097dab9aabc6c3919f0168d342dcf30b60160f9409e6055aeb332f1df" Oct 07 19:47:10 crc kubenswrapper[4813]: I1007 19:47:10.544686 4813 generic.go:334] "Generic (PLEG): container finished" podID="a6b4cff6-9f92-484a-a556-d7b95dcf455f" containerID="a0e2aaf65a6c2ba268bc055ca1c5a3b6c8bf1d8a2c2746374fda6f09371a8834" exitCode=0 Oct 07 19:47:10 crc kubenswrapper[4813]: I1007 19:47:10.545027 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" event={"ID":"a6b4cff6-9f92-484a-a556-d7b95dcf455f","Type":"ContainerDied","Data":"a0e2aaf65a6c2ba268bc055ca1c5a3b6c8bf1d8a2c2746374fda6f09371a8834"} Oct 07 19:47:11 crc kubenswrapper[4813]: I1007 19:47:11.976856 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.036370 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-inventory\") pod \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.036468 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qlnl\" (UniqueName: \"kubernetes.io/projected/a6b4cff6-9f92-484a-a556-d7b95dcf455f-kube-api-access-4qlnl\") pod \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.036537 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-ssh-key\") pod \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\" (UID: \"a6b4cff6-9f92-484a-a556-d7b95dcf455f\") " Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.042897 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6b4cff6-9f92-484a-a556-d7b95dcf455f-kube-api-access-4qlnl" (OuterVolumeSpecName: "kube-api-access-4qlnl") pod "a6b4cff6-9f92-484a-a556-d7b95dcf455f" (UID: "a6b4cff6-9f92-484a-a556-d7b95dcf455f"). InnerVolumeSpecName "kube-api-access-4qlnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.072198 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-inventory" (OuterVolumeSpecName: "inventory") pod "a6b4cff6-9f92-484a-a556-d7b95dcf455f" (UID: "a6b4cff6-9f92-484a-a556-d7b95dcf455f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.091540 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a6b4cff6-9f92-484a-a556-d7b95dcf455f" (UID: "a6b4cff6-9f92-484a-a556-d7b95dcf455f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.138276 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.138313 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a6b4cff6-9f92-484a-a556-d7b95dcf455f-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.138358 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qlnl\" (UniqueName: \"kubernetes.io/projected/a6b4cff6-9f92-484a-a556-d7b95dcf455f-kube-api-access-4qlnl\") on node \"crc\" DevicePath \"\"" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.567683 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" event={"ID":"a6b4cff6-9f92-484a-a556-d7b95dcf455f","Type":"ContainerDied","Data":"3f1c6b32b2416fda394c3805ee483e018f28c012f5583abaae6f4cb1952d6502"} Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.567733 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6whsv" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.567748 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f1c6b32b2416fda394c3805ee483e018f28c012f5583abaae6f4cb1952d6502" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.776200 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km"] Oct 07 19:47:12 crc kubenswrapper[4813]: E1007 19:47:12.776736 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6b4cff6-9f92-484a-a556-d7b95dcf455f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.776760 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6b4cff6-9f92-484a-a556-d7b95dcf455f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.777012 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6b4cff6-9f92-484a-a556-d7b95dcf455f" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.777768 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.781604 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.782651 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.782893 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.783053 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.803612 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km"] Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.853376 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkzv8\" (UniqueName: \"kubernetes.io/projected/6db1d1eb-2150-4a66-bdae-015b651da395-kube-api-access-jkzv8\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.853709 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.853828 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.955022 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.955098 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.955177 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkzv8\" (UniqueName: \"kubernetes.io/projected/6db1d1eb-2150-4a66-bdae-015b651da395-kube-api-access-jkzv8\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.961098 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.968722 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:12 crc kubenswrapper[4813]: I1007 19:47:12.987121 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkzv8\" (UniqueName: \"kubernetes.io/projected/6db1d1eb-2150-4a66-bdae-015b651da395-kube-api-access-jkzv8\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fz2km\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:13 crc kubenswrapper[4813]: I1007 19:47:13.108613 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:13 crc kubenswrapper[4813]: I1007 19:47:13.662798 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km"] Oct 07 19:47:14 crc kubenswrapper[4813]: I1007 19:47:14.585901 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" event={"ID":"6db1d1eb-2150-4a66-bdae-015b651da395","Type":"ContainerStarted","Data":"44bd5f719b126cd6dc16ed188ead99c00a246cb11eedfe15601e84de404ec09f"} Oct 07 19:47:15 crc kubenswrapper[4813]: I1007 19:47:15.595980 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" event={"ID":"6db1d1eb-2150-4a66-bdae-015b651da395","Type":"ContainerStarted","Data":"bf690b6df3660ff72643a6bb2ed5e543b1cf06f38f5b8f8e58e0faa9768c5ba3"} Oct 07 19:47:15 crc kubenswrapper[4813]: I1007 19:47:15.619093 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" podStartSLOduration=2.895756875 podStartE2EDuration="3.619069471s" podCreationTimestamp="2025-10-07 19:47:12 +0000 UTC" firstStartedPulling="2025-10-07 19:47:13.671070777 +0000 UTC m=+1759.749326428" lastFinishedPulling="2025-10-07 19:47:14.394383413 +0000 UTC m=+1760.472639024" observedRunningTime="2025-10-07 19:47:15.612890103 +0000 UTC m=+1761.691145754" watchObservedRunningTime="2025-10-07 19:47:15.619069471 +0000 UTC m=+1761.697325082" Oct 07 19:47:18 crc kubenswrapper[4813]: I1007 19:47:18.057241 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-zc5wc"] Oct 07 19:47:18 crc kubenswrapper[4813]: I1007 19:47:18.064908 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell0-cell-mapping-zc5wc"] Oct 07 19:47:18 crc kubenswrapper[4813]: I1007 19:47:18.623081 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f455f53a-b378-4366-bb40-4e155e06a6b4" path="/var/lib/kubelet/pods/f455f53a-b378-4366-bb40-4e155e06a6b4/volumes" Oct 07 19:47:20 crc kubenswrapper[4813]: I1007 19:47:20.043573 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xdm2h"] Oct 07 19:47:20 crc kubenswrapper[4813]: I1007 19:47:20.052900 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-xdm2h"] Oct 07 19:47:20 crc kubenswrapper[4813]: I1007 19:47:20.623924 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2272b0c8-98de-47c8-9116-fff8e83fb1b1" path="/var/lib/kubelet/pods/2272b0c8-98de-47c8-9116-fff8e83fb1b1/volumes" Oct 07 19:47:20 crc kubenswrapper[4813]: I1007 19:47:20.650168 4813 generic.go:334] "Generic (PLEG): container finished" podID="6db1d1eb-2150-4a66-bdae-015b651da395" containerID="bf690b6df3660ff72643a6bb2ed5e543b1cf06f38f5b8f8e58e0faa9768c5ba3" exitCode=0 Oct 07 19:47:20 crc kubenswrapper[4813]: I1007 19:47:20.650251 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" event={"ID":"6db1d1eb-2150-4a66-bdae-015b651da395","Type":"ContainerDied","Data":"bf690b6df3660ff72643a6bb2ed5e543b1cf06f38f5b8f8e58e0faa9768c5ba3"} Oct 07 19:47:21 crc kubenswrapper[4813]: I1007 19:47:21.602834 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:47:21 crc kubenswrapper[4813]: E1007 19:47:21.603723 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.192449 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.275625 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-ssh-key\") pod \"6db1d1eb-2150-4a66-bdae-015b651da395\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.276082 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-inventory\") pod \"6db1d1eb-2150-4a66-bdae-015b651da395\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.276185 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkzv8\" (UniqueName: \"kubernetes.io/projected/6db1d1eb-2150-4a66-bdae-015b651da395-kube-api-access-jkzv8\") pod \"6db1d1eb-2150-4a66-bdae-015b651da395\" (UID: \"6db1d1eb-2150-4a66-bdae-015b651da395\") " Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.285611 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6db1d1eb-2150-4a66-bdae-015b651da395-kube-api-access-jkzv8" (OuterVolumeSpecName: "kube-api-access-jkzv8") pod "6db1d1eb-2150-4a66-bdae-015b651da395" (UID: "6db1d1eb-2150-4a66-bdae-015b651da395"). InnerVolumeSpecName "kube-api-access-jkzv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.309818 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-inventory" (OuterVolumeSpecName: "inventory") pod "6db1d1eb-2150-4a66-bdae-015b651da395" (UID: "6db1d1eb-2150-4a66-bdae-015b651da395"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.313537 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6db1d1eb-2150-4a66-bdae-015b651da395" (UID: "6db1d1eb-2150-4a66-bdae-015b651da395"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.377466 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkzv8\" (UniqueName: \"kubernetes.io/projected/6db1d1eb-2150-4a66-bdae-015b651da395-kube-api-access-jkzv8\") on node \"crc\" DevicePath \"\"" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.377629 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.377729 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6db1d1eb-2150-4a66-bdae-015b651da395-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.679549 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" event={"ID":"6db1d1eb-2150-4a66-bdae-015b651da395","Type":"ContainerDied","Data":"44bd5f719b126cd6dc16ed188ead99c00a246cb11eedfe15601e84de404ec09f"} Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.679618 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44bd5f719b126cd6dc16ed188ead99c00a246cb11eedfe15601e84de404ec09f" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.679752 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fz2km" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.774011 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r"] Oct 07 19:47:22 crc kubenswrapper[4813]: E1007 19:47:22.774582 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db1d1eb-2150-4a66-bdae-015b651da395" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.774611 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db1d1eb-2150-4a66-bdae-015b651da395" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.774968 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db1d1eb-2150-4a66-bdae-015b651da395" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.775858 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.777949 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.779303 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.779565 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.779931 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.786808 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r"] Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.790373 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.790445 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.790619 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5npkq\" (UniqueName: \"kubernetes.io/projected/b7876782-6cc3-47e2-ab62-b9082196a5c8-kube-api-access-5npkq\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.892160 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5npkq\" (UniqueName: \"kubernetes.io/projected/b7876782-6cc3-47e2-ab62-b9082196a5c8-kube-api-access-5npkq\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.892239 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.892275 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: 
\"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.895871 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.896731 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:22 crc kubenswrapper[4813]: I1007 19:47:22.910979 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5npkq\" (UniqueName: \"kubernetes.io/projected/b7876782-6cc3-47e2-ab62-b9082196a5c8-kube-api-access-5npkq\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-25k9r\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:23 crc kubenswrapper[4813]: I1007 19:47:23.093025 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:47:23 crc kubenswrapper[4813]: I1007 19:47:23.691705 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r"] Oct 07 19:47:24 crc kubenswrapper[4813]: I1007 19:47:24.697000 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" event={"ID":"b7876782-6cc3-47e2-ab62-b9082196a5c8","Type":"ContainerStarted","Data":"b9580c263dea1956254d636870679f1f4b2b0833a063cff78ef68963397d14f3"} Oct 07 19:47:24 crc kubenswrapper[4813]: I1007 19:47:24.697259 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" event={"ID":"b7876782-6cc3-47e2-ab62-b9082196a5c8","Type":"ContainerStarted","Data":"a2c1ec9fab2532764b2a97de246e7b1d194276792c7f43501d2b94968ead72b8"} Oct 07 19:47:32 crc kubenswrapper[4813]: I1007 19:47:32.605149 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:47:32 crc kubenswrapper[4813]: E1007 19:47:32.606155 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:47:47 crc kubenswrapper[4813]: I1007 19:47:47.603103 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:47:47 crc kubenswrapper[4813]: E1007 19:47:47.603907 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:48:02 crc kubenswrapper[4813]: I1007 19:48:02.603025 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:48:02 crc kubenswrapper[4813]: E1007 19:48:02.605106 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:48:03 crc kubenswrapper[4813]: I1007 19:48:03.077246 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" podStartSLOduration=40.569050946 podStartE2EDuration="41.077225692s" podCreationTimestamp="2025-10-07 19:47:22 +0000 UTC" firstStartedPulling="2025-10-07 19:47:23.706501031 +0000 UTC m=+1769.784756642" lastFinishedPulling="2025-10-07 19:47:24.214675777 +0000 UTC m=+1770.292931388" observedRunningTime="2025-10-07 19:47:24.726669023 +0000 UTC m=+1770.804924634" watchObservedRunningTime="2025-10-07 19:48:03.077225692 +0000 UTC m=+1809.155481313" Oct 07 19:48:03 crc kubenswrapper[4813]: I1007 19:48:03.078341 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-p98qg"] Oct 07 19:48:03 crc kubenswrapper[4813]: I1007 19:48:03.086051 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-p98qg"] Oct 07 19:48:04 crc kubenswrapper[4813]: I1007 19:48:04.617309 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e7b45b6-c0c5-43d8-be53-5278bf7fde77" path="/var/lib/kubelet/pods/4e7b45b6-c0c5-43d8-be53-5278bf7fde77/volumes" Oct 07 19:48:09 crc kubenswrapper[4813]: I1007 19:48:09.214916 4813 generic.go:334] "Generic (PLEG): container finished" podID="b7876782-6cc3-47e2-ab62-b9082196a5c8" containerID="b9580c263dea1956254d636870679f1f4b2b0833a063cff78ef68963397d14f3" exitCode=0 Oct 07 19:48:09 crc kubenswrapper[4813]: I1007 19:48:09.215145 4813 scope.go:117] "RemoveContainer" containerID="394f16cc57875dc8c3e48f15387096bfd62e91ea4af9fff21389f55e93e0e8b6" Oct 07 19:48:09 crc kubenswrapper[4813]: I1007 19:48:09.215249 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" event={"ID":"b7876782-6cc3-47e2-ab62-b9082196a5c8","Type":"ContainerDied","Data":"b9580c263dea1956254d636870679f1f4b2b0833a063cff78ef68963397d14f3"} Oct 07 19:48:09 crc kubenswrapper[4813]: I1007 19:48:09.279052 4813 scope.go:117] "RemoveContainer" containerID="5fdcf944836548978a1dc0dafcba17f57f5de9629251d04905344d40867f3a0f" Oct 07 19:48:09 crc kubenswrapper[4813]: I1007 19:48:09.325654 4813 scope.go:117] "RemoveContainer" containerID="a15afe4cc950228d4f306475b429262d93db356e0a2e29f5dccc366b0a7d6db2" Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.624226 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.812501 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-inventory\") pod \"b7876782-6cc3-47e2-ab62-b9082196a5c8\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.813054 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5npkq\" (UniqueName: \"kubernetes.io/projected/b7876782-6cc3-47e2-ab62-b9082196a5c8-kube-api-access-5npkq\") pod \"b7876782-6cc3-47e2-ab62-b9082196a5c8\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.813087 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-ssh-key\") pod \"b7876782-6cc3-47e2-ab62-b9082196a5c8\" (UID: \"b7876782-6cc3-47e2-ab62-b9082196a5c8\") " Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.817671 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7876782-6cc3-47e2-ab62-b9082196a5c8-kube-api-access-5npkq" (OuterVolumeSpecName: "kube-api-access-5npkq") pod "b7876782-6cc3-47e2-ab62-b9082196a5c8" (UID: "b7876782-6cc3-47e2-ab62-b9082196a5c8"). InnerVolumeSpecName "kube-api-access-5npkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.839004 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-inventory" (OuterVolumeSpecName: "inventory") pod "b7876782-6cc3-47e2-ab62-b9082196a5c8" (UID: "b7876782-6cc3-47e2-ab62-b9082196a5c8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.856903 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b7876782-6cc3-47e2-ab62-b9082196a5c8" (UID: "b7876782-6cc3-47e2-ab62-b9082196a5c8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.915704 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5npkq\" (UniqueName: \"kubernetes.io/projected/b7876782-6cc3-47e2-ab62-b9082196a5c8-kube-api-access-5npkq\") on node \"crc\" DevicePath \"\"" Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.915755 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:48:10 crc kubenswrapper[4813]: I1007 19:48:10.915765 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7876782-6cc3-47e2-ab62-b9082196a5c8-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.248420 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" event={"ID":"b7876782-6cc3-47e2-ab62-b9082196a5c8","Type":"ContainerDied","Data":"a2c1ec9fab2532764b2a97de246e7b1d194276792c7f43501d2b94968ead72b8"} Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.248460 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2c1ec9fab2532764b2a97de246e7b1d194276792c7f43501d2b94968ead72b8" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.248478 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-25k9r" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.417168 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh"] Oct 07 19:48:11 crc kubenswrapper[4813]: E1007 19:48:11.417751 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7876782-6cc3-47e2-ab62-b9082196a5c8" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.417825 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7876782-6cc3-47e2-ab62-b9082196a5c8" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.418070 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7876782-6cc3-47e2-ab62-b9082196a5c8" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.418713 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.421779 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.421943 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.422069 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.422504 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.433127 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh"] Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.528979 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gh68\" (UniqueName: \"kubernetes.io/projected/b27c1155-4bc4-4d5d-b782-418c675819d6-kube-api-access-9gh68\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.529152 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.529174 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.631048 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.631090 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.631141 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gh68\" (UniqueName: \"kubernetes.io/projected/b27c1155-4bc4-4d5d-b782-418c675819d6-kube-api-access-9gh68\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" 
(UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.637204 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.646562 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.656500 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gh68\" (UniqueName: \"kubernetes.io/projected/b27c1155-4bc4-4d5d-b782-418c675819d6-kube-api-access-9gh68\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:11 crc kubenswrapper[4813]: I1007 19:48:11.736097 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:48:12 crc kubenswrapper[4813]: I1007 19:48:12.311746 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh"] Oct 07 19:48:12 crc kubenswrapper[4813]: I1007 19:48:12.324043 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 19:48:13 crc kubenswrapper[4813]: I1007 19:48:13.271094 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" event={"ID":"b27c1155-4bc4-4d5d-b782-418c675819d6","Type":"ContainerStarted","Data":"c2ed39ef96fdc4a64a41cdd7048be3a3ae262f5d8aa887c4f9d18a27d4f90541"} Oct 07 19:48:13 crc kubenswrapper[4813]: I1007 19:48:13.272767 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" event={"ID":"b27c1155-4bc4-4d5d-b782-418c675819d6","Type":"ContainerStarted","Data":"8467457f256bb09474e9a77a5c895eb2dc7aede215fb9d01a3f0435b532d0f9d"} Oct 07 19:48:13 crc kubenswrapper[4813]: I1007 19:48:13.612274 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:48:13 crc kubenswrapper[4813]: E1007 19:48:13.616803 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:48:26 crc kubenswrapper[4813]: I1007 19:48:26.603530 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:48:27 crc kubenswrapper[4813]: I1007 
19:48:27.418929 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"3469e74d2f3bae111434c4527abbecf7675c22b5856d0240e0ba28b9c5f98470"} Oct 07 19:48:27 crc kubenswrapper[4813]: I1007 19:48:27.453438 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" podStartSLOduration=15.965040673 podStartE2EDuration="16.453409599s" podCreationTimestamp="2025-10-07 19:48:11 +0000 UTC" firstStartedPulling="2025-10-07 19:48:12.323597806 +0000 UTC m=+1818.401853417" lastFinishedPulling="2025-10-07 19:48:12.811966692 +0000 UTC m=+1818.890222343" observedRunningTime="2025-10-07 19:48:13.300306266 +0000 UTC m=+1819.378561907" watchObservedRunningTime="2025-10-07 19:48:27.453409599 +0000 UTC m=+1833.531665250" Oct 07 19:49:14 crc kubenswrapper[4813]: I1007 19:49:14.964413 4813 generic.go:334] "Generic (PLEG): container finished" podID="b27c1155-4bc4-4d5d-b782-418c675819d6" containerID="c2ed39ef96fdc4a64a41cdd7048be3a3ae262f5d8aa887c4f9d18a27d4f90541" exitCode=2 Oct 07 19:49:14 crc kubenswrapper[4813]: I1007 19:49:14.964507 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" event={"ID":"b27c1155-4bc4-4d5d-b782-418c675819d6","Type":"ContainerDied","Data":"c2ed39ef96fdc4a64a41cdd7048be3a3ae262f5d8aa887c4f9d18a27d4f90541"} Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.384782 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.561393 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-ssh-key\") pod \"b27c1155-4bc4-4d5d-b782-418c675819d6\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.561453 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-inventory\") pod \"b27c1155-4bc4-4d5d-b782-418c675819d6\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.561575 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gh68\" (UniqueName: \"kubernetes.io/projected/b27c1155-4bc4-4d5d-b782-418c675819d6-kube-api-access-9gh68\") pod \"b27c1155-4bc4-4d5d-b782-418c675819d6\" (UID: \"b27c1155-4bc4-4d5d-b782-418c675819d6\") " Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.567675 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b27c1155-4bc4-4d5d-b782-418c675819d6-kube-api-access-9gh68" (OuterVolumeSpecName: "kube-api-access-9gh68") pod "b27c1155-4bc4-4d5d-b782-418c675819d6" (UID: "b27c1155-4bc4-4d5d-b782-418c675819d6"). InnerVolumeSpecName "kube-api-access-9gh68". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.598533 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-inventory" (OuterVolumeSpecName: "inventory") pod "b27c1155-4bc4-4d5d-b782-418c675819d6" (UID: "b27c1155-4bc4-4d5d-b782-418c675819d6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.603641 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b27c1155-4bc4-4d5d-b782-418c675819d6" (UID: "b27c1155-4bc4-4d5d-b782-418c675819d6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.666178 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.666210 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b27c1155-4bc4-4d5d-b782-418c675819d6-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.666224 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gh68\" (UniqueName: \"kubernetes.io/projected/b27c1155-4bc4-4d5d-b782-418c675819d6-kube-api-access-9gh68\") on node \"crc\" DevicePath \"\"" Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.987247 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" event={"ID":"b27c1155-4bc4-4d5d-b782-418c675819d6","Type":"ContainerDied","Data":"8467457f256bb09474e9a77a5c895eb2dc7aede215fb9d01a3f0435b532d0f9d"} Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.987296 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8467457f256bb09474e9a77a5c895eb2dc7aede215fb9d01a3f0435b532d0f9d" Oct 07 19:49:16 crc kubenswrapper[4813]: I1007 19:49:16.987305 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.036621 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn"] Oct 07 19:49:24 crc kubenswrapper[4813]: E1007 19:49:24.038099 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b27c1155-4bc4-4d5d-b782-418c675819d6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.038134 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b27c1155-4bc4-4d5d-b782-418c675819d6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.038676 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b27c1155-4bc4-4d5d-b782-418c675819d6" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.040058 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.047262 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.047788 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.047954 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn"] Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.049998 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.051242 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.141294 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.141451 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb6jm\" (UniqueName: \"kubernetes.io/projected/5eda0149-d966-4253-9bb0-0bddbaaa29f1-kube-api-access-sb6jm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.141706 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.244262 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.244388 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb6jm\" (UniqueName: \"kubernetes.io/projected/5eda0149-d966-4253-9bb0-0bddbaaa29f1-kube-api-access-sb6jm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.244591 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" 
(UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.254069 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.255760 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.279721 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb6jm\" (UniqueName: \"kubernetes.io/projected/5eda0149-d966-4253-9bb0-0bddbaaa29f1-kube-api-access-sb6jm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.382230 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:49:24 crc kubenswrapper[4813]: I1007 19:49:24.993963 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn"] Oct 07 19:49:25 crc kubenswrapper[4813]: I1007 19:49:25.100305 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" event={"ID":"5eda0149-d966-4253-9bb0-0bddbaaa29f1","Type":"ContainerStarted","Data":"b2f15901fa5e5d1864474f6e05370b771ca095434ac6cc0917c97a25e0a5f6ca"} Oct 07 19:49:26 crc kubenswrapper[4813]: I1007 19:49:26.111227 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" event={"ID":"5eda0149-d966-4253-9bb0-0bddbaaa29f1","Type":"ContainerStarted","Data":"d6aa6dff55a94d24168deaf1f4b0acce5032c4ca145eca72f166ba5e7e5c2868"} Oct 07 19:49:26 crc kubenswrapper[4813]: I1007 19:49:26.132541 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" podStartSLOduration=1.586386318 podStartE2EDuration="2.132523677s" podCreationTimestamp="2025-10-07 19:49:24 +0000 UTC" firstStartedPulling="2025-10-07 19:49:25.001930458 +0000 UTC m=+1891.080186089" lastFinishedPulling="2025-10-07 19:49:25.548067807 +0000 UTC m=+1891.626323448" observedRunningTime="2025-10-07 19:49:26.124715334 +0000 UTC m=+1892.202970965" watchObservedRunningTime="2025-10-07 19:49:26.132523677 +0000 UTC m=+1892.210779288" Oct 07 19:50:22 crc kubenswrapper[4813]: I1007 19:50:22.701692 4813 generic.go:334] "Generic (PLEG): container finished" podID="5eda0149-d966-4253-9bb0-0bddbaaa29f1" containerID="d6aa6dff55a94d24168deaf1f4b0acce5032c4ca145eca72f166ba5e7e5c2868" exitCode=0 Oct 07 19:50:22 crc kubenswrapper[4813]: I1007 19:50:22.701779 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" event={"ID":"5eda0149-d966-4253-9bb0-0bddbaaa29f1","Type":"ContainerDied","Data":"d6aa6dff55a94d24168deaf1f4b0acce5032c4ca145eca72f166ba5e7e5c2868"} Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.230862 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.319030 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-ssh-key\") pod \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.319411 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6jm\" (UniqueName: \"kubernetes.io/projected/5eda0149-d966-4253-9bb0-0bddbaaa29f1-kube-api-access-sb6jm\") pod \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.319502 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-inventory\") pod \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\" (UID: \"5eda0149-d966-4253-9bb0-0bddbaaa29f1\") " Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.327754 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eda0149-d966-4253-9bb0-0bddbaaa29f1-kube-api-access-sb6jm" (OuterVolumeSpecName: "kube-api-access-sb6jm") pod "5eda0149-d966-4253-9bb0-0bddbaaa29f1" (UID: "5eda0149-d966-4253-9bb0-0bddbaaa29f1"). InnerVolumeSpecName "kube-api-access-sb6jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.347547 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5eda0149-d966-4253-9bb0-0bddbaaa29f1" (UID: "5eda0149-d966-4253-9bb0-0bddbaaa29f1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.349152 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-inventory" (OuterVolumeSpecName: "inventory") pod "5eda0149-d966-4253-9bb0-0bddbaaa29f1" (UID: "5eda0149-d966-4253-9bb0-0bddbaaa29f1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.421706 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.421807 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6jm\" (UniqueName: \"kubernetes.io/projected/5eda0149-d966-4253-9bb0-0bddbaaa29f1-kube-api-access-sb6jm\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.421834 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eda0149-d966-4253-9bb0-0bddbaaa29f1-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.742548 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" event={"ID":"5eda0149-d966-4253-9bb0-0bddbaaa29f1","Type":"ContainerDied","Data":"b2f15901fa5e5d1864474f6e05370b771ca095434ac6cc0917c97a25e0a5f6ca"} Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.742587 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2f15901fa5e5d1864474f6e05370b771ca095434ac6cc0917c97a25e0a5f6ca" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.742658 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.846835 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-z5kbp"] Oct 07 19:50:24 crc kubenswrapper[4813]: E1007 19:50:24.847239 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eda0149-d966-4253-9bb0-0bddbaaa29f1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.847257 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eda0149-d966-4253-9bb0-0bddbaaa29f1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.847473 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eda0149-d966-4253-9bb0-0bddbaaa29f1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.848137 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.850365 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.850531 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.850659 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.864925 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.872578 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-z5kbp"] Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.935743 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.935796 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwfzf\" (UniqueName: \"kubernetes.io/projected/4a506120-df34-41d9-b92a-9e8944c15dcf-kube-api-access-nwfzf\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:24 crc kubenswrapper[4813]: I1007 19:50:24.935952 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:25 crc kubenswrapper[4813]: I1007 19:50:25.037771 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:25 crc kubenswrapper[4813]: I1007 19:50:25.037822 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwfzf\" (UniqueName: \"kubernetes.io/projected/4a506120-df34-41d9-b92a-9e8944c15dcf-kube-api-access-nwfzf\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:25 crc kubenswrapper[4813]: I1007 19:50:25.037900 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:25 crc 
kubenswrapper[4813]: I1007 19:50:25.041941 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:25 crc kubenswrapper[4813]: I1007 19:50:25.043205 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:25 crc kubenswrapper[4813]: I1007 19:50:25.057593 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwfzf\" (UniqueName: \"kubernetes.io/projected/4a506120-df34-41d9-b92a-9e8944c15dcf-kube-api-access-nwfzf\") pod \"ssh-known-hosts-edpm-deployment-z5kbp\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:25 crc kubenswrapper[4813]: I1007 19:50:25.165689 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:25 crc kubenswrapper[4813]: I1007 19:50:25.712141 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-z5kbp"] Oct 07 19:50:25 crc kubenswrapper[4813]: I1007 19:50:25.751458 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" event={"ID":"4a506120-df34-41d9-b92a-9e8944c15dcf","Type":"ContainerStarted","Data":"3b8072667d074665379dc23347fd8d1b66466a5409ea8521476a629be86a7791"} Oct 07 19:50:26 crc kubenswrapper[4813]: I1007 19:50:26.762701 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" event={"ID":"4a506120-df34-41d9-b92a-9e8944c15dcf","Type":"ContainerStarted","Data":"7f959ebe1ac96ee33cc1039601c50b8b0b8c88a9f5fd63b91a0d1b44669286fc"} Oct 07 19:50:26 crc kubenswrapper[4813]: I1007 19:50:26.792122 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" podStartSLOduration=2.299624821 podStartE2EDuration="2.792105232s" podCreationTimestamp="2025-10-07 19:50:24 +0000 UTC" firstStartedPulling="2025-10-07 19:50:25.720302128 +0000 UTC m=+1951.798557739" lastFinishedPulling="2025-10-07 19:50:26.212782529 +0000 UTC m=+1952.291038150" observedRunningTime="2025-10-07 19:50:26.786267656 +0000 UTC m=+1952.864523307" watchObservedRunningTime="2025-10-07 19:50:26.792105232 +0000 UTC m=+1952.870360843" Oct 07 19:50:34 crc kubenswrapper[4813]: I1007 19:50:34.861555 4813 generic.go:334] "Generic (PLEG): container finished" podID="4a506120-df34-41d9-b92a-9e8944c15dcf" containerID="7f959ebe1ac96ee33cc1039601c50b8b0b8c88a9f5fd63b91a0d1b44669286fc" exitCode=0 Oct 07 19:50:34 crc kubenswrapper[4813]: I1007 19:50:34.861622 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" event={"ID":"4a506120-df34-41d9-b92a-9e8944c15dcf","Type":"ContainerDied","Data":"7f959ebe1ac96ee33cc1039601c50b8b0b8c88a9f5fd63b91a0d1b44669286fc"} Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.292621 4813 util.go:48] "No ready sandbox for pod can be found. 
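The pod_startup_latency_tracker entry above encodes a fixed relationship that holds for every deployment pod in this log: podStartSLOduration equals podStartE2EDuration minus the image-pull window, with all terms taken from the monotonic clock (the m=+... readings). Checking it against the ssh-known-hosts-edpm-deployment-z5kbp values reproduces the logged figure exactly; the short Go check below only restates that arithmetic and is not kubelet code:

package main

import "fmt"

func main() {
	// Monotonic readings (m=+...) copied from the log entry above.
	firstStartedPulling := 1951.798557739
	lastFinishedPulling := 1952.291038150
	podStartE2EDuration := 2.792105232 // creation 19:50:24 -> running 19:50:26.792

	pullWindow := lastFinishedPulling - firstStartedPulling
	slo := podStartE2EDuration - pullWindow

	// Prints 2.299624821, matching podStartSLOduration in the log:
	// startup latency with time spent pulling the image excluded.
	fmt.Printf("podStartSLOduration = %.9f\n", slo)
}

The same subtraction reproduces the figures logged later for run-os (2.323362396), reboot-os (2.337033617), and install-certs (1.752639274).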
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.389763 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-inventory-0\") pod \"4a506120-df34-41d9-b92a-9e8944c15dcf\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.389872 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwfzf\" (UniqueName: \"kubernetes.io/projected/4a506120-df34-41d9-b92a-9e8944c15dcf-kube-api-access-nwfzf\") pod \"4a506120-df34-41d9-b92a-9e8944c15dcf\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.389914 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-ssh-key-openstack-edpm-ipam\") pod \"4a506120-df34-41d9-b92a-9e8944c15dcf\" (UID: \"4a506120-df34-41d9-b92a-9e8944c15dcf\") " Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.396574 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a506120-df34-41d9-b92a-9e8944c15dcf-kube-api-access-nwfzf" (OuterVolumeSpecName: "kube-api-access-nwfzf") pod "4a506120-df34-41d9-b92a-9e8944c15dcf" (UID: "4a506120-df34-41d9-b92a-9e8944c15dcf"). InnerVolumeSpecName "kube-api-access-nwfzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.417468 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "4a506120-df34-41d9-b92a-9e8944c15dcf" (UID: "4a506120-df34-41d9-b92a-9e8944c15dcf"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.444552 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "4a506120-df34-41d9-b92a-9e8944c15dcf" (UID: "4a506120-df34-41d9-b92a-9e8944c15dcf"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.492553 4813 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.492591 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwfzf\" (UniqueName: \"kubernetes.io/projected/4a506120-df34-41d9-b92a-9e8944c15dcf-kube-api-access-nwfzf\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.492605 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4a506120-df34-41d9-b92a-9e8944c15dcf-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.882640 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" event={"ID":"4a506120-df34-41d9-b92a-9e8944c15dcf","Type":"ContainerDied","Data":"3b8072667d074665379dc23347fd8d1b66466a5409ea8521476a629be86a7791"} Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.882724 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b8072667d074665379dc23347fd8d1b66466a5409ea8521476a629be86a7791" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.882766 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-z5kbp" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.971927 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh"] Oct 07 19:50:36 crc kubenswrapper[4813]: E1007 19:50:36.972346 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a506120-df34-41d9-b92a-9e8944c15dcf" containerName="ssh-known-hosts-edpm-deployment" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.972365 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a506120-df34-41d9-b92a-9e8944c15dcf" containerName="ssh-known-hosts-edpm-deployment" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.972606 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a506120-df34-41d9-b92a-9e8944c15dcf" containerName="ssh-known-hosts-edpm-deployment" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.973307 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.976011 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.976066 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.976272 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.983104 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:50:36 crc kubenswrapper[4813]: I1007 19:50:36.990489 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh"] Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.003289 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgvmb\" (UniqueName: \"kubernetes.io/projected/ab40c88e-7fbf-44d6-83a6-0bb6be959120-kube-api-access-dgvmb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.003522 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.003566 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.105580 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.105693 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.105742 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgvmb\" (UniqueName: \"kubernetes.io/projected/ab40c88e-7fbf-44d6-83a6-0bb6be959120-kube-api-access-dgvmb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.114038 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.114092 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.121041 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgvmb\" (UniqueName: \"kubernetes.io/projected/ab40c88e-7fbf-44d6-83a6-0bb6be959120-kube-api-access-dgvmb\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4ntxh\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.295599 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.854841 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh"] Oct 07 19:50:37 crc kubenswrapper[4813]: W1007 19:50:37.857496 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab40c88e_7fbf_44d6_83a6_0bb6be959120.slice/crio-fc90c052f49a3c579824ebcc42baefeb6646016e7af017291fd9d3ee4e7e00a2 WatchSource:0}: Error finding container fc90c052f49a3c579824ebcc42baefeb6646016e7af017291fd9d3ee4e7e00a2: Status 404 returned error can't find the container with id fc90c052f49a3c579824ebcc42baefeb6646016e7af017291fd9d3ee4e7e00a2 Oct 07 19:50:37 crc kubenswrapper[4813]: I1007 19:50:37.896392 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" event={"ID":"ab40c88e-7fbf-44d6-83a6-0bb6be959120","Type":"ContainerStarted","Data":"fc90c052f49a3c579824ebcc42baefeb6646016e7af017291fd9d3ee4e7e00a2"} Oct 07 19:50:38 crc kubenswrapper[4813]: I1007 19:50:38.907770 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" event={"ID":"ab40c88e-7fbf-44d6-83a6-0bb6be959120","Type":"ContainerStarted","Data":"dd5c0c4c1b1e3f0376fad97a77813a0730d63be21cc7f141f793ba86214dafb8"} Oct 07 19:50:38 crc kubenswrapper[4813]: I1007 19:50:38.930760 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" podStartSLOduration=2.323362396 podStartE2EDuration="2.930735508s" podCreationTimestamp="2025-10-07 19:50:36 +0000 UTC" firstStartedPulling="2025-10-07 19:50:37.860383877 +0000 UTC m=+1963.938639498" lastFinishedPulling="2025-10-07 19:50:38.467756999 +0000 UTC m=+1964.546012610" observedRunningTime="2025-10-07 19:50:38.925699665 +0000 UTC m=+1965.003955286" watchObservedRunningTime="2025-10-07 19:50:38.930735508 +0000 UTC 
m=+1965.008991129" Oct 07 19:50:47 crc kubenswrapper[4813]: I1007 19:50:47.996416 4813 generic.go:334] "Generic (PLEG): container finished" podID="ab40c88e-7fbf-44d6-83a6-0bb6be959120" containerID="dd5c0c4c1b1e3f0376fad97a77813a0730d63be21cc7f141f793ba86214dafb8" exitCode=0 Oct 07 19:50:47 crc kubenswrapper[4813]: I1007 19:50:47.996512 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" event={"ID":"ab40c88e-7fbf-44d6-83a6-0bb6be959120","Type":"ContainerDied","Data":"dd5c0c4c1b1e3f0376fad97a77813a0730d63be21cc7f141f793ba86214dafb8"} Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.411430 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.586872 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-ssh-key\") pod \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.587315 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-inventory\") pod \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.587458 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgvmb\" (UniqueName: \"kubernetes.io/projected/ab40c88e-7fbf-44d6-83a6-0bb6be959120-kube-api-access-dgvmb\") pod \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\" (UID: \"ab40c88e-7fbf-44d6-83a6-0bb6be959120\") " Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.595508 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab40c88e-7fbf-44d6-83a6-0bb6be959120-kube-api-access-dgvmb" (OuterVolumeSpecName: "kube-api-access-dgvmb") pod "ab40c88e-7fbf-44d6-83a6-0bb6be959120" (UID: "ab40c88e-7fbf-44d6-83a6-0bb6be959120"). InnerVolumeSpecName "kube-api-access-dgvmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.615938 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ab40c88e-7fbf-44d6-83a6-0bb6be959120" (UID: "ab40c88e-7fbf-44d6-83a6-0bb6be959120"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.616153 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-inventory" (OuterVolumeSpecName: "inventory") pod "ab40c88e-7fbf-44d6-83a6-0bb6be959120" (UID: "ab40c88e-7fbf-44d6-83a6-0bb6be959120"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.689993 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgvmb\" (UniqueName: \"kubernetes.io/projected/ab40c88e-7fbf-44d6-83a6-0bb6be959120-kube-api-access-dgvmb\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.690044 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:49 crc kubenswrapper[4813]: I1007 19:50:49.690063 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab40c88e-7fbf-44d6-83a6-0bb6be959120-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.018159 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" event={"ID":"ab40c88e-7fbf-44d6-83a6-0bb6be959120","Type":"ContainerDied","Data":"fc90c052f49a3c579824ebcc42baefeb6646016e7af017291fd9d3ee4e7e00a2"} Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.018220 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc90c052f49a3c579824ebcc42baefeb6646016e7af017291fd9d3ee4e7e00a2" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.018229 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4ntxh" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.165431 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf"] Oct 07 19:50:50 crc kubenswrapper[4813]: E1007 19:50:50.165909 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab40c88e-7fbf-44d6-83a6-0bb6be959120" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.165932 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab40c88e-7fbf-44d6-83a6-0bb6be959120" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.166166 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab40c88e-7fbf-44d6-83a6-0bb6be959120" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.166937 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.171455 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.173789 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.173931 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.173946 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.181991 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf"] Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.303411 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.303619 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8gdq\" (UniqueName: \"kubernetes.io/projected/a9b763f6-c95e-4650-8aa4-3f99675f3e48-kube-api-access-m8gdq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.303664 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.405569 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8gdq\" (UniqueName: \"kubernetes.io/projected/a9b763f6-c95e-4650-8aa4-3f99675f3e48-kube-api-access-m8gdq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.405629 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.405698 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: 
\"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.410931 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.416860 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.422293 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8gdq\" (UniqueName: \"kubernetes.io/projected/a9b763f6-c95e-4650-8aa4-3f99675f3e48-kube-api-access-m8gdq\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:50 crc kubenswrapper[4813]: I1007 19:50:50.499762 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:50:51 crc kubenswrapper[4813]: I1007 19:50:51.028882 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf"] Oct 07 19:50:51 crc kubenswrapper[4813]: W1007 19:50:51.060151 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9b763f6_c95e_4650_8aa4_3f99675f3e48.slice/crio-e3434e6b8389c35ce67de60de02963cbf8c5652e298c180054aa103921085b6d WatchSource:0}: Error finding container e3434e6b8389c35ce67de60de02963cbf8c5652e298c180054aa103921085b6d: Status 404 returned error can't find the container with id e3434e6b8389c35ce67de60de02963cbf8c5652e298c180054aa103921085b6d Oct 07 19:50:52 crc kubenswrapper[4813]: I1007 19:50:52.045066 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" event={"ID":"a9b763f6-c95e-4650-8aa4-3f99675f3e48","Type":"ContainerStarted","Data":"e3434e6b8389c35ce67de60de02963cbf8c5652e298c180054aa103921085b6d"} Oct 07 19:50:52 crc kubenswrapper[4813]: I1007 19:50:52.079791 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:50:52 crc kubenswrapper[4813]: I1007 19:50:52.080008 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:50:53 crc kubenswrapper[4813]: I1007 19:50:53.054304 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" event={"ID":"a9b763f6-c95e-4650-8aa4-3f99675f3e48","Type":"ContainerStarted","Data":"3053f437bcd3380cae4fe3805fcf5b28015cb3f67bbb338e73e0dba789785fa2"} Oct 07 19:50:53 crc kubenswrapper[4813]: I1007 19:50:53.074713 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" podStartSLOduration=2.337033617 podStartE2EDuration="3.074309831s" podCreationTimestamp="2025-10-07 19:50:50 +0000 UTC" firstStartedPulling="2025-10-07 19:50:51.063608891 +0000 UTC m=+1977.141864502" lastFinishedPulling="2025-10-07 19:50:51.800885085 +0000 UTC m=+1977.879140716" observedRunningTime="2025-10-07 19:50:53.07249564 +0000 UTC m=+1979.150751251" watchObservedRunningTime="2025-10-07 19:50:53.074309831 +0000 UTC m=+1979.152565462" Oct 07 19:51:02 crc kubenswrapper[4813]: I1007 19:51:02.147308 4813 generic.go:334] "Generic (PLEG): container finished" podID="a9b763f6-c95e-4650-8aa4-3f99675f3e48" containerID="3053f437bcd3380cae4fe3805fcf5b28015cb3f67bbb338e73e0dba789785fa2" exitCode=0 Oct 07 19:51:02 crc kubenswrapper[4813]: I1007 19:51:02.147407 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" event={"ID":"a9b763f6-c95e-4650-8aa4-3f99675f3e48","Type":"ContainerDied","Data":"3053f437bcd3380cae4fe3805fcf5b28015cb3f67bbb338e73e0dba789785fa2"} Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.659510 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.840264 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8gdq\" (UniqueName: \"kubernetes.io/projected/a9b763f6-c95e-4650-8aa4-3f99675f3e48-kube-api-access-m8gdq\") pod \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.840621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-inventory\") pod \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.840709 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-ssh-key\") pod \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\" (UID: \"a9b763f6-c95e-4650-8aa4-3f99675f3e48\") " Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.858658 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9b763f6-c95e-4650-8aa4-3f99675f3e48-kube-api-access-m8gdq" (OuterVolumeSpecName: "kube-api-access-m8gdq") pod "a9b763f6-c95e-4650-8aa4-3f99675f3e48" (UID: "a9b763f6-c95e-4650-8aa4-3f99675f3e48"). InnerVolumeSpecName "kube-api-access-m8gdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.908865 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a9b763f6-c95e-4650-8aa4-3f99675f3e48" (UID: "a9b763f6-c95e-4650-8aa4-3f99675f3e48"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.912435 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-inventory" (OuterVolumeSpecName: "inventory") pod "a9b763f6-c95e-4650-8aa4-3f99675f3e48" (UID: "a9b763f6-c95e-4650-8aa4-3f99675f3e48"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.946858 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8gdq\" (UniqueName: \"kubernetes.io/projected/a9b763f6-c95e-4650-8aa4-3f99675f3e48-kube-api-access-m8gdq\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.946954 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:03 crc kubenswrapper[4813]: I1007 19:51:03.946974 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9b763f6-c95e-4650-8aa4-3f99675f3e48-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.172436 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" event={"ID":"a9b763f6-c95e-4650-8aa4-3f99675f3e48","Type":"ContainerDied","Data":"e3434e6b8389c35ce67de60de02963cbf8c5652e298c180054aa103921085b6d"} Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.172523 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3434e6b8389c35ce67de60de02963cbf8c5652e298c180054aa103921085b6d" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.172454 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.362991 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5"] Oct 07 19:51:04 crc kubenswrapper[4813]: E1007 19:51:04.363468 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9b763f6-c95e-4650-8aa4-3f99675f3e48" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.363491 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9b763f6-c95e-4650-8aa4-3f99675f3e48" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.363767 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9b763f6-c95e-4650-8aa4-3f99675f3e48" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.364818 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.368119 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.368543 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.368821 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.369312 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.371837 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.373024 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.373254 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.373807 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5"] Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.374219 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.454777 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.455064 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.455202 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.455354 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-nova-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.455470 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.455617 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.455716 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.455835 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.455948 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.456057 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.456187 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: 
\"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.456295 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.456429 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.456555 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhlxx\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-kube-api-access-xhlxx\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.558596 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.558832 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.558903 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.558981 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhlxx\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-kube-api-access-xhlxx\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559098 4813 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559192 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559275 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559375 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559468 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559575 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559670 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559772 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.559892 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.560006 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.565096 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.565709 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.565712 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.565839 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.566168 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.566553 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.567593 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.568988 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.569212 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.569793 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.570471 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.571405 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.576268 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.577555 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhlxx\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-kube-api-access-xhlxx\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:04 crc kubenswrapper[4813]: I1007 19:51:04.700689 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:05 crc kubenswrapper[4813]: I1007 19:51:05.270619 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5"] Oct 07 19:51:06 crc kubenswrapper[4813]: I1007 19:51:06.197123 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" event={"ID":"4edb32ce-3490-4665-8fde-69010044b237","Type":"ContainerStarted","Data":"e4a9b3e065c5f97c552dbb0de4a5338b77b484d3353d8868bc33f115bafd026e"} Oct 07 19:51:06 crc kubenswrapper[4813]: I1007 19:51:06.197469 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" event={"ID":"4edb32ce-3490-4665-8fde-69010044b237","Type":"ContainerStarted","Data":"f6af4719716fb8debcf3b47b2285e8cb58fd6724cd03a17334c4ef338e6ef407"} Oct 07 19:51:06 crc kubenswrapper[4813]: I1007 19:51:06.217312 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" podStartSLOduration=1.7526392739999999 podStartE2EDuration="2.217292691s" podCreationTimestamp="2025-10-07 19:51:04 +0000 UTC" firstStartedPulling="2025-10-07 19:51:05.276910441 +0000 UTC m=+1991.355166062" lastFinishedPulling="2025-10-07 19:51:05.741563858 +0000 UTC m=+1991.819819479" observedRunningTime="2025-10-07 19:51:06.21307684 +0000 UTC m=+1992.291332471" watchObservedRunningTime="2025-10-07 19:51:06.217292691 +0000 UTC m=+1992.295548302" Oct 07 19:51:22 crc kubenswrapper[4813]: I1007 19:51:22.079425 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:51:22 crc kubenswrapper[4813]: I1007 19:51:22.079784 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:51:51 crc kubenswrapper[4813]: I1007 19:51:51.706415 4813 generic.go:334] "Generic (PLEG): container finished" podID="4edb32ce-3490-4665-8fde-69010044b237" containerID="e4a9b3e065c5f97c552dbb0de4a5338b77b484d3353d8868bc33f115bafd026e" exitCode=0 Oct 07 19:51:51 crc kubenswrapper[4813]: I1007 19:51:51.706482 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" 
event={"ID":"4edb32ce-3490-4665-8fde-69010044b237","Type":"ContainerDied","Data":"e4a9b3e065c5f97c552dbb0de4a5338b77b484d3353d8868bc33f115bafd026e"} Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.079401 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.079474 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.079525 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.080283 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3469e74d2f3bae111434c4527abbecf7675c22b5856d0240e0ba28b9c5f98470"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.080396 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://3469e74d2f3bae111434c4527abbecf7675c22b5856d0240e0ba28b9c5f98470" gracePeriod=600 Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.722388 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="3469e74d2f3bae111434c4527abbecf7675c22b5856d0240e0ba28b9c5f98470" exitCode=0 Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.722571 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"3469e74d2f3bae111434c4527abbecf7675c22b5856d0240e0ba28b9c5f98470"} Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.722815 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75"} Oct 07 19:51:52 crc kubenswrapper[4813]: I1007 19:51:52.722889 4813 scope.go:117] "RemoveContainer" containerID="55451354d80fce2add0cbf18708959b3aeb9e9826c265cf9407c93b68efa2617" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.144166 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.207463 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.207700 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.207804 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-telemetry-combined-ca-bundle\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.207874 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.207955 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-nova-combined-ca-bundle\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.208067 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-libvirt-combined-ca-bundle\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.208896 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-inventory\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.209219 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-bootstrap-combined-ca-bundle\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.209673 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-neutron-metadata-combined-ca-bundle\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: 
\"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.210546 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhlxx\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-kube-api-access-xhlxx\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.210696 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-repo-setup-combined-ca-bundle\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.210794 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ovn-combined-ca-bundle\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.210870 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-ovn-default-certs-0\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.210943 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ssh-key\") pod \"4edb32ce-3490-4665-8fde-69010044b237\" (UID: \"4edb32ce-3490-4665-8fde-69010044b237\") " Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.215476 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.215635 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.215928 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.216827 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.218019 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.219189 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.219440 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.219915 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.220684 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.222022 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.222446 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-kube-api-access-xhlxx" (OuterVolumeSpecName: "kube-api-access-xhlxx") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "kube-api-access-xhlxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.226997 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.243927 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-inventory" (OuterVolumeSpecName: "inventory") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.252675 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4edb32ce-3490-4665-8fde-69010044b237" (UID: "4edb32ce-3490-4665-8fde-69010044b237"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.312938 4813 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313172 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313186 4813 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313195 4813 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313207 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhlxx\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-kube-api-access-xhlxx\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313217 4813 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313227 4813 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313236 4813 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313260 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313268 4813 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313278 4813 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313287 4813 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-telemetry-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313297 4813 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4edb32ce-3490-4665-8fde-69010044b237-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.313307 4813 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4edb32ce-3490-4665-8fde-69010044b237-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.739468 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" event={"ID":"4edb32ce-3490-4665-8fde-69010044b237","Type":"ContainerDied","Data":"f6af4719716fb8debcf3b47b2285e8cb58fd6724cd03a17334c4ef338e6ef407"} Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.739512 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6af4719716fb8debcf3b47b2285e8cb58fd6724cd03a17334c4ef338e6ef407" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.739580 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.892952 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v"] Oct 07 19:51:53 crc kubenswrapper[4813]: E1007 19:51:53.893618 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4edb32ce-3490-4665-8fde-69010044b237" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.893650 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4edb32ce-3490-4665-8fde-69010044b237" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.894049 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4edb32ce-3490-4665-8fde-69010044b237" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.894810 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.898613 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.898653 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.903092 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.903485 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.904031 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.911572 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v"] Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.925106 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.925462 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/adaa6c4f-3899-4644-acb5-81f67417971e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.925653 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j2g4\" (UniqueName: \"kubernetes.io/projected/adaa6c4f-3899-4644-acb5-81f67417971e-kube-api-access-8j2g4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.925819 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:53 crc kubenswrapper[4813]: I1007 19:51:53.925963 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.027643 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.027701 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/adaa6c4f-3899-4644-acb5-81f67417971e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.027749 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j2g4\" (UniqueName: \"kubernetes.io/projected/adaa6c4f-3899-4644-acb5-81f67417971e-kube-api-access-8j2g4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.027787 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.027818 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.029207 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/adaa6c4f-3899-4644-acb5-81f67417971e-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.031957 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.032396 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.035600 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.046863 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j2g4\" (UniqueName: \"kubernetes.io/projected/adaa6c4f-3899-4644-acb5-81f67417971e-kube-api-access-8j2g4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-96t9v\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.227480 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:51:54 crc kubenswrapper[4813]: I1007 19:51:54.845243 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v"] Oct 07 19:51:55 crc kubenswrapper[4813]: I1007 19:51:55.407286 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:51:55 crc kubenswrapper[4813]: I1007 19:51:55.764151 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" event={"ID":"adaa6c4f-3899-4644-acb5-81f67417971e","Type":"ContainerStarted","Data":"c989d3f9414cbd6b4ad0e611836279c2f407fc11d3cdd212c3e2670faa67c5c8"} Oct 07 19:51:55 crc kubenswrapper[4813]: I1007 19:51:55.764607 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" event={"ID":"adaa6c4f-3899-4644-acb5-81f67417971e","Type":"ContainerStarted","Data":"4fed531e33b23c027901eb01e65f79993a255ffed68f848638e8c347e6bb17c8"} Oct 07 19:51:55 crc kubenswrapper[4813]: I1007 19:51:55.797958 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" podStartSLOduration=2.227923917 podStartE2EDuration="2.797933721s" podCreationTimestamp="2025-10-07 19:51:53 +0000 UTC" firstStartedPulling="2025-10-07 19:51:54.833139717 +0000 UTC m=+2040.911395358" lastFinishedPulling="2025-10-07 19:51:55.403149541 +0000 UTC m=+2041.481405162" observedRunningTime="2025-10-07 19:51:55.788267064 +0000 UTC m=+2041.866522675" watchObservedRunningTime="2025-10-07 19:51:55.797933721 +0000 UTC m=+2041.876189362" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.432853 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-72lxp"] Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.436264 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.444770 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-72lxp"] Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.566055 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzpjc\" (UniqueName: \"kubernetes.io/projected/041f388b-48c2-4b80-94ec-67485f6429bb-kube-api-access-xzpjc\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.566145 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-utilities\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.566225 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-catalog-content\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.668280 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzpjc\" (UniqueName: \"kubernetes.io/projected/041f388b-48c2-4b80-94ec-67485f6429bb-kube-api-access-xzpjc\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.668561 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-utilities\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.668662 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-catalog-content\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.669076 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-utilities\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.669122 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-catalog-content\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.697026 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xzpjc\" (UniqueName: \"kubernetes.io/projected/041f388b-48c2-4b80-94ec-67485f6429bb-kube-api-access-xzpjc\") pod \"community-operators-72lxp\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:56 crc kubenswrapper[4813]: I1007 19:52:56.771098 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:52:57 crc kubenswrapper[4813]: I1007 19:52:57.156976 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-72lxp"] Oct 07 19:52:57 crc kubenswrapper[4813]: W1007 19:52:57.191824 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod041f388b_48c2_4b80_94ec_67485f6429bb.slice/crio-5d93a4359dfd1f9dbbe2ffa7bfae31941ddc5a1e22a6780864b4bca801a314f3 WatchSource:0}: Error finding container 5d93a4359dfd1f9dbbe2ffa7bfae31941ddc5a1e22a6780864b4bca801a314f3: Status 404 returned error can't find the container with id 5d93a4359dfd1f9dbbe2ffa7bfae31941ddc5a1e22a6780864b4bca801a314f3 Oct 07 19:52:57 crc kubenswrapper[4813]: I1007 19:52:57.438189 4813 generic.go:334] "Generic (PLEG): container finished" podID="041f388b-48c2-4b80-94ec-67485f6429bb" containerID="85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8" exitCode=0 Oct 07 19:52:57 crc kubenswrapper[4813]: I1007 19:52:57.438236 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-72lxp" event={"ID":"041f388b-48c2-4b80-94ec-67485f6429bb","Type":"ContainerDied","Data":"85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8"} Oct 07 19:52:57 crc kubenswrapper[4813]: I1007 19:52:57.438264 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-72lxp" event={"ID":"041f388b-48c2-4b80-94ec-67485f6429bb","Type":"ContainerStarted","Data":"5d93a4359dfd1f9dbbe2ffa7bfae31941ddc5a1e22a6780864b4bca801a314f3"} Oct 07 19:52:58 crc kubenswrapper[4813]: I1007 19:52:58.450112 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-72lxp" event={"ID":"041f388b-48c2-4b80-94ec-67485f6429bb","Type":"ContainerStarted","Data":"ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048"} Oct 07 19:53:00 crc kubenswrapper[4813]: I1007 19:53:00.471422 4813 generic.go:334] "Generic (PLEG): container finished" podID="041f388b-48c2-4b80-94ec-67485f6429bb" containerID="ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048" exitCode=0 Oct 07 19:53:00 crc kubenswrapper[4813]: I1007 19:53:00.471520 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-72lxp" event={"ID":"041f388b-48c2-4b80-94ec-67485f6429bb","Type":"ContainerDied","Data":"ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048"} Oct 07 19:53:01 crc kubenswrapper[4813]: I1007 19:53:01.487522 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-72lxp" event={"ID":"041f388b-48c2-4b80-94ec-67485f6429bb","Type":"ContainerStarted","Data":"fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935"} Oct 07 19:53:06 crc kubenswrapper[4813]: I1007 19:53:06.771515 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:53:06 crc 
kubenswrapper[4813]: I1007 19:53:06.771908 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:53:06 crc kubenswrapper[4813]: I1007 19:53:06.832611 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:53:06 crc kubenswrapper[4813]: I1007 19:53:06.860057 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-72lxp" podStartSLOduration=7.196744614 podStartE2EDuration="10.860026224s" podCreationTimestamp="2025-10-07 19:52:56 +0000 UTC" firstStartedPulling="2025-10-07 19:52:57.443304383 +0000 UTC m=+2103.521559994" lastFinishedPulling="2025-10-07 19:53:01.106585993 +0000 UTC m=+2107.184841604" observedRunningTime="2025-10-07 19:53:01.516855866 +0000 UTC m=+2107.595111477" watchObservedRunningTime="2025-10-07 19:53:06.860026224 +0000 UTC m=+2112.938281855" Oct 07 19:53:07 crc kubenswrapper[4813]: I1007 19:53:07.622740 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:53:07 crc kubenswrapper[4813]: I1007 19:53:07.678684 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-72lxp"] Oct 07 19:53:09 crc kubenswrapper[4813]: I1007 19:53:09.577275 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-72lxp" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" containerName="registry-server" containerID="cri-o://fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935" gracePeriod=2 Oct 07 19:53:09 crc kubenswrapper[4813]: I1007 19:53:09.977142 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.149230 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzpjc\" (UniqueName: \"kubernetes.io/projected/041f388b-48c2-4b80-94ec-67485f6429bb-kube-api-access-xzpjc\") pod \"041f388b-48c2-4b80-94ec-67485f6429bb\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.149342 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-catalog-content\") pod \"041f388b-48c2-4b80-94ec-67485f6429bb\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.149537 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-utilities\") pod \"041f388b-48c2-4b80-94ec-67485f6429bb\" (UID: \"041f388b-48c2-4b80-94ec-67485f6429bb\") " Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.150932 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-utilities" (OuterVolumeSpecName: "utilities") pod "041f388b-48c2-4b80-94ec-67485f6429bb" (UID: "041f388b-48c2-4b80-94ec-67485f6429bb"). InnerVolumeSpecName "utilities". 
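
The pod_startup_latency_tracker entry above reports two durations for community-operators-72lxp: podStartE2EDuration="10.860026224s" and podStartSLOduration=7.196744614. The logged timestamps are consistent with the SLO figure being the end-to-end startup latency minus the image-pull window (lastFinishedPulling minus firstStartedPulling). A quick check, with the logged nanoseconds truncated to strptime's microsecond precision:

    from datetime import datetime

    FMT = "%Y-%m-%d %H:%M:%S.%f"

    created   = datetime.strptime("2025-10-07 19:52:56.000000", FMT)  # podCreationTimestamp
    pull_from = datetime.strptime("2025-10-07 19:52:57.443304", FMT)  # firstStartedPulling
    pull_to   = datetime.strptime("2025-10-07 19:53:01.106585", FMT)  # lastFinishedPulling
    running   = datetime.strptime("2025-10-07 19:53:06.860026", FMT)  # watchObservedRunningTime

    e2e  = (running - created).total_seconds()    # ~10.860026, matches podStartE2EDuration
    pull = (pull_to - pull_from).total_seconds()  # ~3.663281 spent pulling images
    print(f"E2E={e2e:.6f}s SLO={e2e - pull:.6f}s")  # SLO ~7.196745s, matches the log

The same relationship holds for the two earlier tracker entries in this log (the install-certs and ovn-edpm jobs at 19:51:06 and 19:51:55).
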
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.177146 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/041f388b-48c2-4b80-94ec-67485f6429bb-kube-api-access-xzpjc" (OuterVolumeSpecName: "kube-api-access-xzpjc") pod "041f388b-48c2-4b80-94ec-67485f6429bb" (UID: "041f388b-48c2-4b80-94ec-67485f6429bb"). InnerVolumeSpecName "kube-api-access-xzpjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.223480 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "041f388b-48c2-4b80-94ec-67485f6429bb" (UID: "041f388b-48c2-4b80-94ec-67485f6429bb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.252401 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzpjc\" (UniqueName: \"kubernetes.io/projected/041f388b-48c2-4b80-94ec-67485f6429bb-kube-api-access-xzpjc\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.252438 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.252450 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041f388b-48c2-4b80-94ec-67485f6429bb-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.592673 4813 generic.go:334] "Generic (PLEG): container finished" podID="041f388b-48c2-4b80-94ec-67485f6429bb" containerID="fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935" exitCode=0 Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.592748 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-72lxp" event={"ID":"041f388b-48c2-4b80-94ec-67485f6429bb","Type":"ContainerDied","Data":"fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935"} Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.592803 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-72lxp" event={"ID":"041f388b-48c2-4b80-94ec-67485f6429bb","Type":"ContainerDied","Data":"5d93a4359dfd1f9dbbe2ffa7bfae31941ddc5a1e22a6780864b4bca801a314f3"} Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.592844 4813 scope.go:117] "RemoveContainer" containerID="fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.593073 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-72lxp" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.641872 4813 scope.go:117] "RemoveContainer" containerID="ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.662021 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-72lxp"] Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.667425 4813 scope.go:117] "RemoveContainer" containerID="85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.671456 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-72lxp"] Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.710567 4813 scope.go:117] "RemoveContainer" containerID="fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935" Oct 07 19:53:10 crc kubenswrapper[4813]: E1007 19:53:10.711141 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935\": container with ID starting with fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935 not found: ID does not exist" containerID="fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.711171 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935"} err="failed to get container status \"fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935\": rpc error: code = NotFound desc = could not find container \"fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935\": container with ID starting with fb6d40ca5121e7b46ac8c25397d3c1a810a3f1f2a066d8726702425f18efd935 not found: ID does not exist" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.711207 4813 scope.go:117] "RemoveContainer" containerID="ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048" Oct 07 19:53:10 crc kubenswrapper[4813]: E1007 19:53:10.711874 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048\": container with ID starting with ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048 not found: ID does not exist" containerID="ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.711900 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048"} err="failed to get container status \"ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048\": rpc error: code = NotFound desc = could not find container \"ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048\": container with ID starting with ed3094595c2b55d227359e95a8ddae7a056969c029a47175b50d90eaf54b6048 not found: ID does not exist" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.711915 4813 scope.go:117] "RemoveContainer" containerID="85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8" Oct 07 19:53:10 crc kubenswrapper[4813]: E1007 19:53:10.712185 4813 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8\": container with ID starting with 85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8 not found: ID does not exist" containerID="85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8" Oct 07 19:53:10 crc kubenswrapper[4813]: I1007 19:53:10.712233 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8"} err="failed to get container status \"85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8\": rpc error: code = NotFound desc = could not find container \"85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8\": container with ID starting with 85b955b596d06b7db7fd2439a2c2e546afd3920059b0316081de300b626ed6d8 not found: ID does not exist" Oct 07 19:53:12 crc kubenswrapper[4813]: I1007 19:53:12.628623 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" path="/var/lib/kubelet/pods/041f388b-48c2-4b80-94ec-67485f6429bb/volumes" Oct 07 19:53:12 crc kubenswrapper[4813]: I1007 19:53:12.632141 4813 generic.go:334] "Generic (PLEG): container finished" podID="adaa6c4f-3899-4644-acb5-81f67417971e" containerID="c989d3f9414cbd6b4ad0e611836279c2f407fc11d3cdd212c3e2670faa67c5c8" exitCode=0 Oct 07 19:53:12 crc kubenswrapper[4813]: I1007 19:53:12.632625 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" event={"ID":"adaa6c4f-3899-4644-acb5-81f67417971e","Type":"ContainerDied","Data":"c989d3f9414cbd6b4ad0e611836279c2f407fc11d3cdd212c3e2670faa67c5c8"} Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.180755 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.349007 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j2g4\" (UniqueName: \"kubernetes.io/projected/adaa6c4f-3899-4644-acb5-81f67417971e-kube-api-access-8j2g4\") pod \"adaa6c4f-3899-4644-acb5-81f67417971e\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.349081 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/adaa6c4f-3899-4644-acb5-81f67417971e-ovncontroller-config-0\") pod \"adaa6c4f-3899-4644-acb5-81f67417971e\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.349114 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ovn-combined-ca-bundle\") pod \"adaa6c4f-3899-4644-acb5-81f67417971e\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.349191 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-inventory\") pod \"adaa6c4f-3899-4644-acb5-81f67417971e\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.349373 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ssh-key\") pod \"adaa6c4f-3899-4644-acb5-81f67417971e\" (UID: \"adaa6c4f-3899-4644-acb5-81f67417971e\") " Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.355140 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "adaa6c4f-3899-4644-acb5-81f67417971e" (UID: "adaa6c4f-3899-4644-acb5-81f67417971e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.377570 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adaa6c4f-3899-4644-acb5-81f67417971e-kube-api-access-8j2g4" (OuterVolumeSpecName: "kube-api-access-8j2g4") pod "adaa6c4f-3899-4644-acb5-81f67417971e" (UID: "adaa6c4f-3899-4644-acb5-81f67417971e"). InnerVolumeSpecName "kube-api-access-8j2g4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.390721 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "adaa6c4f-3899-4644-acb5-81f67417971e" (UID: "adaa6c4f-3899-4644-acb5-81f67417971e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.399621 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adaa6c4f-3899-4644-acb5-81f67417971e-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "adaa6c4f-3899-4644-acb5-81f67417971e" (UID: "adaa6c4f-3899-4644-acb5-81f67417971e"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.414442 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-inventory" (OuterVolumeSpecName: "inventory") pod "adaa6c4f-3899-4644-acb5-81f67417971e" (UID: "adaa6c4f-3899-4644-acb5-81f67417971e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.451304 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.451366 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j2g4\" (UniqueName: \"kubernetes.io/projected/adaa6c4f-3899-4644-acb5-81f67417971e-kube-api-access-8j2g4\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.451379 4813 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/adaa6c4f-3899-4644-acb5-81f67417971e-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.451389 4813 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.451397 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/adaa6c4f-3899-4644-acb5-81f67417971e-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.662247 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" event={"ID":"adaa6c4f-3899-4644-acb5-81f67417971e","Type":"ContainerDied","Data":"4fed531e33b23c027901eb01e65f79993a255ffed68f848638e8c347e6bb17c8"} Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.662308 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4fed531e33b23c027901eb01e65f79993a255ffed68f848638e8c347e6bb17c8" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.662399 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-96t9v" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.762169 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl"] Oct 07 19:53:14 crc kubenswrapper[4813]: E1007 19:53:14.762756 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" containerName="registry-server" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.762778 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" containerName="registry-server" Oct 07 19:53:14 crc kubenswrapper[4813]: E1007 19:53:14.762795 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" containerName="extract-content" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.762806 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" containerName="extract-content" Oct 07 19:53:14 crc kubenswrapper[4813]: E1007 19:53:14.762836 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adaa6c4f-3899-4644-acb5-81f67417971e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.762844 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="adaa6c4f-3899-4644-acb5-81f67417971e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 19:53:14 crc kubenswrapper[4813]: E1007 19:53:14.762884 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" containerName="extract-utilities" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.762892 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" containerName="extract-utilities" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.763112 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="041f388b-48c2-4b80-94ec-67485f6429bb" containerName="registry-server" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.763133 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="adaa6c4f-3899-4644-acb5-81f67417971e" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.763973 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.766968 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.767098 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.767115 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.768262 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.768747 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.768847 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.780148 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl"] Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.858983 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.859245 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.859483 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.859699 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6jgp\" (UniqueName: \"kubernetes.io/projected/d4674843-15aa-4490-a878-bc2853b4457b-kube-api-access-s6jgp\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.859801 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.860105 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.961578 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.962015 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.962057 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.962110 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.962179 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6jgp\" (UniqueName: \"kubernetes.io/projected/d4674843-15aa-4490-a878-bc2853b4457b-kube-api-access-s6jgp\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.962213 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 
crc kubenswrapper[4813]: I1007 19:53:14.968476 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.968574 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.968941 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.969431 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.973516 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:14 crc kubenswrapper[4813]: I1007 19:53:14.990209 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6jgp\" (UniqueName: \"kubernetes.io/projected/d4674843-15aa-4490-a878-bc2853b4457b-kube-api-access-s6jgp\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:15 crc kubenswrapper[4813]: I1007 19:53:15.078838 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:53:15 crc kubenswrapper[4813]: I1007 19:53:15.700399 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl"] Oct 07 19:53:15 crc kubenswrapper[4813]: I1007 19:53:15.712745 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 19:53:16 crc kubenswrapper[4813]: I1007 19:53:16.685320 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" event={"ID":"d4674843-15aa-4490-a878-bc2853b4457b","Type":"ContainerStarted","Data":"ce36f4e088a8d5cc24cf4b51fbeb450ca40becb7cb24da101bbc8de052aee37d"} Oct 07 19:53:16 crc kubenswrapper[4813]: I1007 19:53:16.685748 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" event={"ID":"d4674843-15aa-4490-a878-bc2853b4457b","Type":"ContainerStarted","Data":"c47856081f61756986b8f565e5197a7202fa0dbacdccbbc666ab8c5c4608ccba"} Oct 07 19:53:16 crc kubenswrapper[4813]: I1007 19:53:16.725924 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" podStartSLOduration=2.224537267 podStartE2EDuration="2.725898833s" podCreationTimestamp="2025-10-07 19:53:14 +0000 UTC" firstStartedPulling="2025-10-07 19:53:15.712494985 +0000 UTC m=+2121.790750596" lastFinishedPulling="2025-10-07 19:53:16.213856511 +0000 UTC m=+2122.292112162" observedRunningTime="2025-10-07 19:53:16.715386872 +0000 UTC m=+2122.793642503" watchObservedRunningTime="2025-10-07 19:53:16.725898833 +0000 UTC m=+2122.804154454" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.284858 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-64gd9"] Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.290134 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.317095 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-64gd9"] Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.442278 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-utilities\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.442401 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkn6c\" (UniqueName: \"kubernetes.io/projected/767b74fe-63c7-4ce9-b64d-fb6aedea2280-kube-api-access-gkn6c\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.442471 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-catalog-content\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.544604 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-utilities\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.544702 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkn6c\" (UniqueName: \"kubernetes.io/projected/767b74fe-63c7-4ce9-b64d-fb6aedea2280-kube-api-access-gkn6c\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.544762 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-catalog-content\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.545292 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-catalog-content\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.546087 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-utilities\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.579255 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gkn6c\" (UniqueName: \"kubernetes.io/projected/767b74fe-63c7-4ce9-b64d-fb6aedea2280-kube-api-access-gkn6c\") pod \"certified-operators-64gd9\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:45 crc kubenswrapper[4813]: I1007 19:53:45.613396 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:46 crc kubenswrapper[4813]: I1007 19:53:46.063166 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-64gd9"] Oct 07 19:53:47 crc kubenswrapper[4813]: I1007 19:53:47.050394 4813 generic.go:334] "Generic (PLEG): container finished" podID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerID="e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247" exitCode=0 Oct 07 19:53:47 crc kubenswrapper[4813]: I1007 19:53:47.050534 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-64gd9" event={"ID":"767b74fe-63c7-4ce9-b64d-fb6aedea2280","Type":"ContainerDied","Data":"e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247"} Oct 07 19:53:47 crc kubenswrapper[4813]: I1007 19:53:47.050785 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-64gd9" event={"ID":"767b74fe-63c7-4ce9-b64d-fb6aedea2280","Type":"ContainerStarted","Data":"8075b01fbab7b96b2a0ba9e25c5a26fb53a94bdcf9c6fdf92b6cc2ae0c3ff378"} Oct 07 19:53:48 crc kubenswrapper[4813]: I1007 19:53:48.062374 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-64gd9" event={"ID":"767b74fe-63c7-4ce9-b64d-fb6aedea2280","Type":"ContainerStarted","Data":"c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb"} Oct 07 19:53:49 crc kubenswrapper[4813]: I1007 19:53:49.073979 4813 generic.go:334] "Generic (PLEG): container finished" podID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerID="c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb" exitCode=0 Oct 07 19:53:49 crc kubenswrapper[4813]: I1007 19:53:49.074036 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-64gd9" event={"ID":"767b74fe-63c7-4ce9-b64d-fb6aedea2280","Type":"ContainerDied","Data":"c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb"} Oct 07 19:53:50 crc kubenswrapper[4813]: I1007 19:53:50.087709 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-64gd9" event={"ID":"767b74fe-63c7-4ce9-b64d-fb6aedea2280","Type":"ContainerStarted","Data":"9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c"} Oct 07 19:53:50 crc kubenswrapper[4813]: I1007 19:53:50.110844 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-64gd9" podStartSLOduration=2.663067956 podStartE2EDuration="5.110823889s" podCreationTimestamp="2025-10-07 19:53:45 +0000 UTC" firstStartedPulling="2025-10-07 19:53:47.053461712 +0000 UTC m=+2153.131717333" lastFinishedPulling="2025-10-07 19:53:49.501217615 +0000 UTC m=+2155.579473266" observedRunningTime="2025-10-07 19:53:50.108194673 +0000 UTC m=+2156.186450284" watchObservedRunningTime="2025-10-07 19:53:50.110823889 +0000 UTC m=+2156.189079500" Oct 07 19:53:52 crc kubenswrapper[4813]: I1007 19:53:52.079515 4813 patch_prober.go:28] interesting 
pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:53:52 crc kubenswrapper[4813]: I1007 19:53:52.080045 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:53:55 crc kubenswrapper[4813]: I1007 19:53:55.613685 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:55 crc kubenswrapper[4813]: I1007 19:53:55.614056 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:55 crc kubenswrapper[4813]: I1007 19:53:55.681271 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:56 crc kubenswrapper[4813]: I1007 19:53:56.252294 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:56 crc kubenswrapper[4813]: I1007 19:53:56.302783 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-64gd9"] Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.229129 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-64gd9" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerName="registry-server" containerID="cri-o://9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c" gracePeriod=2 Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.739586 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.758448 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-catalog-content\") pod \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.758530 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-utilities\") pod \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.758569 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkn6c\" (UniqueName: \"kubernetes.io/projected/767b74fe-63c7-4ce9-b64d-fb6aedea2280-kube-api-access-gkn6c\") pod \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\" (UID: \"767b74fe-63c7-4ce9-b64d-fb6aedea2280\") " Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.761990 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-utilities" (OuterVolumeSpecName: "utilities") pod "767b74fe-63c7-4ce9-b64d-fb6aedea2280" (UID: "767b74fe-63c7-4ce9-b64d-fb6aedea2280"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.776121 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/767b74fe-63c7-4ce9-b64d-fb6aedea2280-kube-api-access-gkn6c" (OuterVolumeSpecName: "kube-api-access-gkn6c") pod "767b74fe-63c7-4ce9-b64d-fb6aedea2280" (UID: "767b74fe-63c7-4ce9-b64d-fb6aedea2280"). InnerVolumeSpecName "kube-api-access-gkn6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.858584 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "767b74fe-63c7-4ce9-b64d-fb6aedea2280" (UID: "767b74fe-63c7-4ce9-b64d-fb6aedea2280"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.860295 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.860351 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767b74fe-63c7-4ce9-b64d-fb6aedea2280-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:58 crc kubenswrapper[4813]: I1007 19:53:58.860383 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkn6c\" (UniqueName: \"kubernetes.io/projected/767b74fe-63c7-4ce9-b64d-fb6aedea2280-kube-api-access-gkn6c\") on node \"crc\" DevicePath \"\"" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.241555 4813 generic.go:334] "Generic (PLEG): container finished" podID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerID="9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c" exitCode=0 Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.241625 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-64gd9" event={"ID":"767b74fe-63c7-4ce9-b64d-fb6aedea2280","Type":"ContainerDied","Data":"9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c"} Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.241682 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-64gd9" event={"ID":"767b74fe-63c7-4ce9-b64d-fb6aedea2280","Type":"ContainerDied","Data":"8075b01fbab7b96b2a0ba9e25c5a26fb53a94bdcf9c6fdf92b6cc2ae0c3ff378"} Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.241714 4813 scope.go:117] "RemoveContainer" containerID="9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.242609 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-64gd9" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.265945 4813 scope.go:117] "RemoveContainer" containerID="c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.283122 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-64gd9"] Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.292416 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-64gd9"] Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.309342 4813 scope.go:117] "RemoveContainer" containerID="e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.354260 4813 scope.go:117] "RemoveContainer" containerID="9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c" Oct 07 19:53:59 crc kubenswrapper[4813]: E1007 19:53:59.354959 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c\": container with ID starting with 9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c not found: ID does not exist" containerID="9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.354999 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c"} err="failed to get container status \"9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c\": rpc error: code = NotFound desc = could not find container \"9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c\": container with ID starting with 9ed35969e6710344e8d3b67c50f14d34ad5f8dde5fc1e4ad652a76f91405038c not found: ID does not exist" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.355025 4813 scope.go:117] "RemoveContainer" containerID="c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb" Oct 07 19:53:59 crc kubenswrapper[4813]: E1007 19:53:59.355433 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb\": container with ID starting with c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb not found: ID does not exist" containerID="c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.355459 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb"} err="failed to get container status \"c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb\": rpc error: code = NotFound desc = could not find container \"c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb\": container with ID starting with c8f56d347f7d702bccff7c2643da1588639a48a94ae95c7bce180859c69360cb not found: ID does not exist" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.355476 4813 scope.go:117] "RemoveContainer" containerID="e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247" Oct 07 19:53:59 crc kubenswrapper[4813]: E1007 19:53:59.355766 4813 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247\": container with ID starting with e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247 not found: ID does not exist" containerID="e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247" Oct 07 19:53:59 crc kubenswrapper[4813]: I1007 19:53:59.355789 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247"} err="failed to get container status \"e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247\": rpc error: code = NotFound desc = could not find container \"e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247\": container with ID starting with e34a9ebf77fd75dfddc2943964648700ffbdd7ae014a92273709b8434dec6247 not found: ID does not exist" Oct 07 19:54:00 crc kubenswrapper[4813]: I1007 19:54:00.630114 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" path="/var/lib/kubelet/pods/767b74fe-63c7-4ce9-b64d-fb6aedea2280/volumes" Oct 07 19:54:15 crc kubenswrapper[4813]: I1007 19:54:15.418996 4813 generic.go:334] "Generic (PLEG): container finished" podID="d4674843-15aa-4490-a878-bc2853b4457b" containerID="ce36f4e088a8d5cc24cf4b51fbeb450ca40becb7cb24da101bbc8de052aee37d" exitCode=0 Oct 07 19:54:15 crc kubenswrapper[4813]: I1007 19:54:15.419150 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" event={"ID":"d4674843-15aa-4490-a878-bc2853b4457b","Type":"ContainerDied","Data":"ce36f4e088a8d5cc24cf4b51fbeb450ca40becb7cb24da101bbc8de052aee37d"} Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.859287 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.886911 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-inventory\") pod \"d4674843-15aa-4490-a878-bc2853b4457b\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.887093 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-metadata-combined-ca-bundle\") pod \"d4674843-15aa-4490-a878-bc2853b4457b\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.888085 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6jgp\" (UniqueName: \"kubernetes.io/projected/d4674843-15aa-4490-a878-bc2853b4457b-kube-api-access-s6jgp\") pod \"d4674843-15aa-4490-a878-bc2853b4457b\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.888131 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-ssh-key\") pod \"d4674843-15aa-4490-a878-bc2853b4457b\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.888203 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"d4674843-15aa-4490-a878-bc2853b4457b\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.888260 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-nova-metadata-neutron-config-0\") pod \"d4674843-15aa-4490-a878-bc2853b4457b\" (UID: \"d4674843-15aa-4490-a878-bc2853b4457b\") " Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.893544 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "d4674843-15aa-4490-a878-bc2853b4457b" (UID: "d4674843-15aa-4490-a878-bc2853b4457b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.900787 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4674843-15aa-4490-a878-bc2853b4457b-kube-api-access-s6jgp" (OuterVolumeSpecName: "kube-api-access-s6jgp") pod "d4674843-15aa-4490-a878-bc2853b4457b" (UID: "d4674843-15aa-4490-a878-bc2853b4457b"). InnerVolumeSpecName "kube-api-access-s6jgp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.930765 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-inventory" (OuterVolumeSpecName: "inventory") pod "d4674843-15aa-4490-a878-bc2853b4457b" (UID: "d4674843-15aa-4490-a878-bc2853b4457b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.935204 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "d4674843-15aa-4490-a878-bc2853b4457b" (UID: "d4674843-15aa-4490-a878-bc2853b4457b"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.963046 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "d4674843-15aa-4490-a878-bc2853b4457b" (UID: "d4674843-15aa-4490-a878-bc2853b4457b"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.977925 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d4674843-15aa-4490-a878-bc2853b4457b" (UID: "d4674843-15aa-4490-a878-bc2853b4457b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.991460 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.991489 4813 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.991517 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6jgp\" (UniqueName: \"kubernetes.io/projected/d4674843-15aa-4490-a878-bc2853b4457b-kube-api-access-s6jgp\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.991527 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.991536 4813 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:16 crc kubenswrapper[4813]: I1007 19:54:16.991546 4813 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d4674843-15aa-4490-a878-bc2853b4457b-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.451777 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" event={"ID":"d4674843-15aa-4490-a878-bc2853b4457b","Type":"ContainerDied","Data":"c47856081f61756986b8f565e5197a7202fa0dbacdccbbc666ab8c5c4608ccba"} Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.452061 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c47856081f61756986b8f565e5197a7202fa0dbacdccbbc666ab8c5c4608ccba" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.451841 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.560807 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96"] Oct 07 19:54:17 crc kubenswrapper[4813]: E1007 19:54:17.561982 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerName="registry-server" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.562112 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerName="registry-server" Oct 07 19:54:17 crc kubenswrapper[4813]: E1007 19:54:17.562215 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerName="extract-utilities" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.562287 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerName="extract-utilities" Oct 07 19:54:17 crc kubenswrapper[4813]: E1007 19:54:17.562411 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4674843-15aa-4490-a878-bc2853b4457b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.562487 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4674843-15aa-4490-a878-bc2853b4457b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 19:54:17 crc kubenswrapper[4813]: E1007 19:54:17.562574 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerName="extract-content" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.562882 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerName="extract-content" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.563198 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4674843-15aa-4490-a878-bc2853b4457b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.563297 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="767b74fe-63c7-4ce9-b64d-fb6aedea2280" containerName="registry-server" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.564203 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.571246 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.571551 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.571572 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.571762 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.572402 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.588850 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96"] Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.603048 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.603109 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.603133 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jv9h\" (UniqueName: \"kubernetes.io/projected/a4b24290-359e-4973-bf65-53ca4889870d-kube-api-access-6jv9h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.603275 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.603377 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.703924 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.704005 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.704028 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.704044 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jv9h\" (UniqueName: \"kubernetes.io/projected/a4b24290-359e-4973-bf65-53ca4889870d-kube-api-access-6jv9h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.704122 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.708457 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.708865 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.709678 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.710070 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-inventory\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.733161 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jv9h\" (UniqueName: \"kubernetes.io/projected/a4b24290-359e-4973-bf65-53ca4889870d-kube-api-access-6jv9h\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4dn96\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:17 crc kubenswrapper[4813]: I1007 19:54:17.892397 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:54:18 crc kubenswrapper[4813]: I1007 19:54:18.488787 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96"] Oct 07 19:54:19 crc kubenswrapper[4813]: I1007 19:54:19.470284 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" event={"ID":"a4b24290-359e-4973-bf65-53ca4889870d","Type":"ContainerStarted","Data":"2248a170fb16bd47e88a51a1732ef1542317c97deedfbd15debfff9718745e72"} Oct 07 19:54:19 crc kubenswrapper[4813]: I1007 19:54:19.470915 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" event={"ID":"a4b24290-359e-4973-bf65-53ca4889870d","Type":"ContainerStarted","Data":"1885bd31ba284f64e5a07991d59ff9c75c5b6fa83b1592588ade60c696da9bc2"} Oct 07 19:54:19 crc kubenswrapper[4813]: I1007 19:54:19.491512 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" podStartSLOduration=2.015437924 podStartE2EDuration="2.491495272s" podCreationTimestamp="2025-10-07 19:54:17 +0000 UTC" firstStartedPulling="2025-10-07 19:54:18.496617764 +0000 UTC m=+2184.574873375" lastFinishedPulling="2025-10-07 19:54:18.972675112 +0000 UTC m=+2185.050930723" observedRunningTime="2025-10-07 19:54:19.484450229 +0000 UTC m=+2185.562705860" watchObservedRunningTime="2025-10-07 19:54:19.491495272 +0000 UTC m=+2185.569750883" Oct 07 19:54:22 crc kubenswrapper[4813]: I1007 19:54:22.079108 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:54:22 crc kubenswrapper[4813]: I1007 19:54:22.079857 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.228804 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9qh2x"] Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.232230 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.241122 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qh2x"] Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.255742 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-catalog-content\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.255930 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldtq7\" (UniqueName: \"kubernetes.io/projected/289bda1e-7d3f-4f72-af95-d83f7a8f2379-kube-api-access-ldtq7\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.255990 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-utilities\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.357822 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-catalog-content\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.357984 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldtq7\" (UniqueName: \"kubernetes.io/projected/289bda1e-7d3f-4f72-af95-d83f7a8f2379-kube-api-access-ldtq7\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.358051 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-utilities\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.358525 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-catalog-content\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.358663 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-utilities\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.387141 4813 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ldtq7\" (UniqueName: \"kubernetes.io/projected/289bda1e-7d3f-4f72-af95-d83f7a8f2379-kube-api-access-ldtq7\") pod \"redhat-marketplace-9qh2x\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.404018 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bzjrz"] Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.418086 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.421957 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bzjrz"] Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.459952 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-catalog-content\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.460027 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-utilities\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.460058 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwksx\" (UniqueName: \"kubernetes.io/projected/496e79ac-e377-4874-b3c5-57fe92f9262d-kube-api-access-lwksx\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.562062 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-catalog-content\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.562212 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-utilities\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.562238 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwksx\" (UniqueName: \"kubernetes.io/projected/496e79ac-e377-4874-b3c5-57fe92f9262d-kube-api-access-lwksx\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.562659 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-catalog-content\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " 
pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.563020 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.563425 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-utilities\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.602616 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwksx\" (UniqueName: \"kubernetes.io/projected/496e79ac-e377-4874-b3c5-57fe92f9262d-kube-api-access-lwksx\") pod \"redhat-operators-bzjrz\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:24 crc kubenswrapper[4813]: I1007 19:54:24.768171 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:25 crc kubenswrapper[4813]: I1007 19:54:25.072854 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qh2x"] Oct 07 19:54:25 crc kubenswrapper[4813]: W1007 19:54:25.078736 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod289bda1e_7d3f_4f72_af95_d83f7a8f2379.slice/crio-1bb2f3934bf1bdffb195ddfb5fc93f133e13644922e64fd7823aa4c0bc003426 WatchSource:0}: Error finding container 1bb2f3934bf1bdffb195ddfb5fc93f133e13644922e64fd7823aa4c0bc003426: Status 404 returned error can't find the container with id 1bb2f3934bf1bdffb195ddfb5fc93f133e13644922e64fd7823aa4c0bc003426 Oct 07 19:54:25 crc kubenswrapper[4813]: I1007 19:54:25.262402 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bzjrz"] Oct 07 19:54:25 crc kubenswrapper[4813]: W1007 19:54:25.289809 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod496e79ac_e377_4874_b3c5_57fe92f9262d.slice/crio-f212d34c0695ca626fc3d420e59a14b3c395155f1875217fd83efbb8ceb7d81b WatchSource:0}: Error finding container f212d34c0695ca626fc3d420e59a14b3c395155f1875217fd83efbb8ceb7d81b: Status 404 returned error can't find the container with id f212d34c0695ca626fc3d420e59a14b3c395155f1875217fd83efbb8ceb7d81b Oct 07 19:54:25 crc kubenswrapper[4813]: I1007 19:54:25.552682 4813 generic.go:334] "Generic (PLEG): container finished" podID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerID="abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723" exitCode=0 Oct 07 19:54:25 crc kubenswrapper[4813]: I1007 19:54:25.552896 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qh2x" event={"ID":"289bda1e-7d3f-4f72-af95-d83f7a8f2379","Type":"ContainerDied","Data":"abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723"} Oct 07 19:54:25 crc kubenswrapper[4813]: I1007 19:54:25.552969 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qh2x" event={"ID":"289bda1e-7d3f-4f72-af95-d83f7a8f2379","Type":"ContainerStarted","Data":"1bb2f3934bf1bdffb195ddfb5fc93f133e13644922e64fd7823aa4c0bc003426"} Oct 07 19:54:25 crc 
kubenswrapper[4813]: I1007 19:54:25.555940 4813 generic.go:334] "Generic (PLEG): container finished" podID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerID="cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a" exitCode=0 Oct 07 19:54:25 crc kubenswrapper[4813]: I1007 19:54:25.555976 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzjrz" event={"ID":"496e79ac-e377-4874-b3c5-57fe92f9262d","Type":"ContainerDied","Data":"cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a"} Oct 07 19:54:25 crc kubenswrapper[4813]: I1007 19:54:25.556003 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzjrz" event={"ID":"496e79ac-e377-4874-b3c5-57fe92f9262d","Type":"ContainerStarted","Data":"f212d34c0695ca626fc3d420e59a14b3c395155f1875217fd83efbb8ceb7d81b"} Oct 07 19:54:26 crc kubenswrapper[4813]: I1007 19:54:26.567301 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzjrz" event={"ID":"496e79ac-e377-4874-b3c5-57fe92f9262d","Type":"ContainerStarted","Data":"e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a"} Oct 07 19:54:26 crc kubenswrapper[4813]: I1007 19:54:26.572731 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qh2x" event={"ID":"289bda1e-7d3f-4f72-af95-d83f7a8f2379","Type":"ContainerStarted","Data":"2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db"} Oct 07 19:54:27 crc kubenswrapper[4813]: I1007 19:54:27.598621 4813 generic.go:334] "Generic (PLEG): container finished" podID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerID="2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db" exitCode=0 Oct 07 19:54:27 crc kubenswrapper[4813]: I1007 19:54:27.599021 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qh2x" event={"ID":"289bda1e-7d3f-4f72-af95-d83f7a8f2379","Type":"ContainerDied","Data":"2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db"} Oct 07 19:54:28 crc kubenswrapper[4813]: I1007 19:54:28.616854 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qh2x" event={"ID":"289bda1e-7d3f-4f72-af95-d83f7a8f2379","Type":"ContainerStarted","Data":"64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038"} Oct 07 19:54:28 crc kubenswrapper[4813]: I1007 19:54:28.641695 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9qh2x" podStartSLOduration=1.970912558 podStartE2EDuration="4.641673953s" podCreationTimestamp="2025-10-07 19:54:24 +0000 UTC" firstStartedPulling="2025-10-07 19:54:25.554375214 +0000 UTC m=+2191.632630825" lastFinishedPulling="2025-10-07 19:54:28.225136589 +0000 UTC m=+2194.303392220" observedRunningTime="2025-10-07 19:54:28.633954401 +0000 UTC m=+2194.712210022" watchObservedRunningTime="2025-10-07 19:54:28.641673953 +0000 UTC m=+2194.719929574" Oct 07 19:54:30 crc kubenswrapper[4813]: I1007 19:54:30.647299 4813 generic.go:334] "Generic (PLEG): container finished" podID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerID="e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a" exitCode=0 Oct 07 19:54:30 crc kubenswrapper[4813]: I1007 19:54:30.647389 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzjrz" 
event={"ID":"496e79ac-e377-4874-b3c5-57fe92f9262d","Type":"ContainerDied","Data":"e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a"} Oct 07 19:54:31 crc kubenswrapper[4813]: I1007 19:54:31.662391 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzjrz" event={"ID":"496e79ac-e377-4874-b3c5-57fe92f9262d","Type":"ContainerStarted","Data":"c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f"} Oct 07 19:54:31 crc kubenswrapper[4813]: I1007 19:54:31.688815 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bzjrz" podStartSLOduration=2.136772215 podStartE2EDuration="7.688796286s" podCreationTimestamp="2025-10-07 19:54:24 +0000 UTC" firstStartedPulling="2025-10-07 19:54:25.557149024 +0000 UTC m=+2191.635404625" lastFinishedPulling="2025-10-07 19:54:31.109173075 +0000 UTC m=+2197.187428696" observedRunningTime="2025-10-07 19:54:31.68686483 +0000 UTC m=+2197.765120461" watchObservedRunningTime="2025-10-07 19:54:31.688796286 +0000 UTC m=+2197.767051897" Oct 07 19:54:34 crc kubenswrapper[4813]: I1007 19:54:34.563377 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:34 crc kubenswrapper[4813]: I1007 19:54:34.563847 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:34 crc kubenswrapper[4813]: I1007 19:54:34.619773 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:34 crc kubenswrapper[4813]: I1007 19:54:34.731142 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:34 crc kubenswrapper[4813]: I1007 19:54:34.769587 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:34 crc kubenswrapper[4813]: I1007 19:54:34.769887 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:34 crc kubenswrapper[4813]: I1007 19:54:34.996659 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qh2x"] Oct 07 19:54:35 crc kubenswrapper[4813]: I1007 19:54:35.823778 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bzjrz" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="registry-server" probeResult="failure" output=< Oct 07 19:54:35 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 19:54:35 crc kubenswrapper[4813]: > Oct 07 19:54:36 crc kubenswrapper[4813]: I1007 19:54:36.704969 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9qh2x" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerName="registry-server" containerID="cri-o://64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038" gracePeriod=2 Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.185120 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.301486 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-utilities\") pod \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.301653 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldtq7\" (UniqueName: \"kubernetes.io/projected/289bda1e-7d3f-4f72-af95-d83f7a8f2379-kube-api-access-ldtq7\") pod \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.301705 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-catalog-content\") pod \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\" (UID: \"289bda1e-7d3f-4f72-af95-d83f7a8f2379\") " Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.302211 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-utilities" (OuterVolumeSpecName: "utilities") pod "289bda1e-7d3f-4f72-af95-d83f7a8f2379" (UID: "289bda1e-7d3f-4f72-af95-d83f7a8f2379"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.312060 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/289bda1e-7d3f-4f72-af95-d83f7a8f2379-kube-api-access-ldtq7" (OuterVolumeSpecName: "kube-api-access-ldtq7") pod "289bda1e-7d3f-4f72-af95-d83f7a8f2379" (UID: "289bda1e-7d3f-4f72-af95-d83f7a8f2379"). InnerVolumeSpecName "kube-api-access-ldtq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.319653 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "289bda1e-7d3f-4f72-af95-d83f7a8f2379" (UID: "289bda1e-7d3f-4f72-af95-d83f7a8f2379"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.404164 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.404204 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289bda1e-7d3f-4f72-af95-d83f7a8f2379-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.404219 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldtq7\" (UniqueName: \"kubernetes.io/projected/289bda1e-7d3f-4f72-af95-d83f7a8f2379-kube-api-access-ldtq7\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.718819 4813 generic.go:334] "Generic (PLEG): container finished" podID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerID="64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038" exitCode=0 Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.718892 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qh2x" event={"ID":"289bda1e-7d3f-4f72-af95-d83f7a8f2379","Type":"ContainerDied","Data":"64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038"} Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.718933 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9qh2x" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.718944 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9qh2x" event={"ID":"289bda1e-7d3f-4f72-af95-d83f7a8f2379","Type":"ContainerDied","Data":"1bb2f3934bf1bdffb195ddfb5fc93f133e13644922e64fd7823aa4c0bc003426"} Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.718963 4813 scope.go:117] "RemoveContainer" containerID="64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.746455 4813 scope.go:117] "RemoveContainer" containerID="2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.769522 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qh2x"] Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.779193 4813 scope.go:117] "RemoveContainer" containerID="abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.779643 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9qh2x"] Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.837497 4813 scope.go:117] "RemoveContainer" containerID="64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038" Oct 07 19:54:37 crc kubenswrapper[4813]: E1007 19:54:37.838088 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038\": container with ID starting with 64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038 not found: ID does not exist" containerID="64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.838168 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038"} err="failed to get container status \"64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038\": rpc error: code = NotFound desc = could not find container \"64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038\": container with ID starting with 64f01ad872194bdd64ca1057d8a2c67b15df7353f2fff561af221e657af70038 not found: ID does not exist" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.838218 4813 scope.go:117] "RemoveContainer" containerID="2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db" Oct 07 19:54:37 crc kubenswrapper[4813]: E1007 19:54:37.838671 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db\": container with ID starting with 2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db not found: ID does not exist" containerID="2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.838710 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db"} err="failed to get container status \"2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db\": rpc error: code = NotFound desc = could not find container \"2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db\": container with ID starting with 2fb91cf34bba4aabd9462a850912005ed44e0138122230b76751421ee5ac10db not found: ID does not exist" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.838738 4813 scope.go:117] "RemoveContainer" containerID="abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723" Oct 07 19:54:37 crc kubenswrapper[4813]: E1007 19:54:37.839053 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723\": container with ID starting with abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723 not found: ID does not exist" containerID="abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723" Oct 07 19:54:37 crc kubenswrapper[4813]: I1007 19:54:37.839082 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723"} err="failed to get container status \"abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723\": rpc error: code = NotFound desc = could not find container \"abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723\": container with ID starting with abfebfc609ebb75275395cec950dd04f50542135d974f8891b8380496f5d0723 not found: ID does not exist" Oct 07 19:54:38 crc kubenswrapper[4813]: I1007 19:54:38.615638 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" path="/var/lib/kubelet/pods/289bda1e-7d3f-4f72-af95-d83f7a8f2379/volumes" Oct 07 19:54:44 crc kubenswrapper[4813]: I1007 19:54:44.830934 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:44 crc kubenswrapper[4813]: I1007 19:54:44.901652 4813 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:45 crc kubenswrapper[4813]: I1007 19:54:45.083669 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bzjrz"] Oct 07 19:54:46 crc kubenswrapper[4813]: I1007 19:54:46.831046 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bzjrz" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="registry-server" containerID="cri-o://c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f" gracePeriod=2 Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.313435 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.421365 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-utilities\") pod \"496e79ac-e377-4874-b3c5-57fe92f9262d\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.421461 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwksx\" (UniqueName: \"kubernetes.io/projected/496e79ac-e377-4874-b3c5-57fe92f9262d-kube-api-access-lwksx\") pod \"496e79ac-e377-4874-b3c5-57fe92f9262d\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.421513 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-catalog-content\") pod \"496e79ac-e377-4874-b3c5-57fe92f9262d\" (UID: \"496e79ac-e377-4874-b3c5-57fe92f9262d\") " Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.422228 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-utilities" (OuterVolumeSpecName: "utilities") pod "496e79ac-e377-4874-b3c5-57fe92f9262d" (UID: "496e79ac-e377-4874-b3c5-57fe92f9262d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.431670 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e79ac-e377-4874-b3c5-57fe92f9262d-kube-api-access-lwksx" (OuterVolumeSpecName: "kube-api-access-lwksx") pod "496e79ac-e377-4874-b3c5-57fe92f9262d" (UID: "496e79ac-e377-4874-b3c5-57fe92f9262d"). InnerVolumeSpecName "kube-api-access-lwksx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.506735 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "496e79ac-e377-4874-b3c5-57fe92f9262d" (UID: "496e79ac-e377-4874-b3c5-57fe92f9262d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.523766 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.523793 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwksx\" (UniqueName: \"kubernetes.io/projected/496e79ac-e377-4874-b3c5-57fe92f9262d-kube-api-access-lwksx\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.523804 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/496e79ac-e377-4874-b3c5-57fe92f9262d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.845475 4813 generic.go:334] "Generic (PLEG): container finished" podID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerID="c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f" exitCode=0 Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.845554 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bzjrz" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.845591 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzjrz" event={"ID":"496e79ac-e377-4874-b3c5-57fe92f9262d","Type":"ContainerDied","Data":"c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f"} Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.847028 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bzjrz" event={"ID":"496e79ac-e377-4874-b3c5-57fe92f9262d","Type":"ContainerDied","Data":"f212d34c0695ca626fc3d420e59a14b3c395155f1875217fd83efbb8ceb7d81b"} Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.847098 4813 scope.go:117] "RemoveContainer" containerID="c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.890194 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bzjrz"] Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.894940 4813 scope.go:117] "RemoveContainer" containerID="e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.901968 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bzjrz"] Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.921101 4813 scope.go:117] "RemoveContainer" containerID="cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.965359 4813 scope.go:117] "RemoveContainer" containerID="c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f" Oct 07 19:54:47 crc kubenswrapper[4813]: E1007 19:54:47.965897 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f\": container with ID starting with c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f not found: ID does not exist" containerID="c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.965950 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f"} err="failed to get container status \"c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f\": rpc error: code = NotFound desc = could not find container \"c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f\": container with ID starting with c5d9ab9082b47cff2b1f6d909d4ce9e6e97ed66eb0be5eee0b784668433be14f not found: ID does not exist" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.965984 4813 scope.go:117] "RemoveContainer" containerID="e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a" Oct 07 19:54:47 crc kubenswrapper[4813]: E1007 19:54:47.966397 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a\": container with ID starting with e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a not found: ID does not exist" containerID="e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.966460 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a"} err="failed to get container status \"e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a\": rpc error: code = NotFound desc = could not find container \"e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a\": container with ID starting with e856b4cc41060e70dfea50e43696e4dd3e0872f4b8b3bba2f45122f331b5be2a not found: ID does not exist" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.966520 4813 scope.go:117] "RemoveContainer" containerID="cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a" Oct 07 19:54:47 crc kubenswrapper[4813]: E1007 19:54:47.966940 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a\": container with ID starting with cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a not found: ID does not exist" containerID="cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a" Oct 07 19:54:47 crc kubenswrapper[4813]: I1007 19:54:47.967004 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a"} err="failed to get container status \"cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a\": rpc error: code = NotFound desc = could not find container \"cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a\": container with ID starting with cbd47708791712a5e91660d297070b213292802d8c2c3a008d0f6010c14a1d3a not found: ID does not exist" Oct 07 19:54:48 crc kubenswrapper[4813]: I1007 19:54:48.631767 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" path="/var/lib/kubelet/pods/496e79ac-e377-4874-b3c5-57fe92f9262d/volumes" Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.078669 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.079079 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.079136 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.080028 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.080103 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" gracePeriod=600 Oct 07 19:54:52 crc kubenswrapper[4813]: E1007 19:54:52.203904 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.912548 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" exitCode=0 Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.912593 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75"} Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.912635 4813 scope.go:117] "RemoveContainer" containerID="3469e74d2f3bae111434c4527abbecf7675c22b5856d0240e0ba28b9c5f98470" Oct 07 19:54:52 crc kubenswrapper[4813]: I1007 19:54:52.913260 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:54:52 crc kubenswrapper[4813]: E1007 19:54:52.913546 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:55:05 crc kubenswrapper[4813]: I1007 19:55:05.602859 4813 scope.go:117] "RemoveContainer" 
containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:55:05 crc kubenswrapper[4813]: E1007 19:55:05.603736 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:55:20 crc kubenswrapper[4813]: I1007 19:55:20.603050 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:55:20 crc kubenswrapper[4813]: E1007 19:55:20.603928 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:55:33 crc kubenswrapper[4813]: I1007 19:55:33.603521 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:55:33 crc kubenswrapper[4813]: E1007 19:55:33.604730 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:55:48 crc kubenswrapper[4813]: I1007 19:55:48.603754 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:55:48 crc kubenswrapper[4813]: E1007 19:55:48.604722 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:56:03 crc kubenswrapper[4813]: I1007 19:56:03.602653 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:56:03 crc kubenswrapper[4813]: E1007 19:56:03.603632 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:56:18 crc kubenswrapper[4813]: I1007 19:56:18.603185 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:56:18 crc kubenswrapper[4813]: E1007 19:56:18.604341 4813 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:56:32 crc kubenswrapper[4813]: I1007 19:56:32.603088 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:56:32 crc kubenswrapper[4813]: E1007 19:56:32.604041 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:56:46 crc kubenswrapper[4813]: I1007 19:56:46.602731 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:56:46 crc kubenswrapper[4813]: E1007 19:56:46.603807 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:56:58 crc kubenswrapper[4813]: I1007 19:56:58.604990 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:56:58 crc kubenswrapper[4813]: E1007 19:56:58.607123 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:57:09 crc kubenswrapper[4813]: I1007 19:57:09.603262 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:57:09 crc kubenswrapper[4813]: E1007 19:57:09.604560 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:57:21 crc kubenswrapper[4813]: I1007 19:57:21.602986 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:57:21 crc kubenswrapper[4813]: E1007 19:57:21.604049 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:57:33 crc kubenswrapper[4813]: I1007 19:57:33.603374 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:57:33 crc kubenswrapper[4813]: E1007 19:57:33.604406 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:57:44 crc kubenswrapper[4813]: I1007 19:57:44.610453 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:57:44 crc kubenswrapper[4813]: E1007 19:57:44.611396 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:57:58 crc kubenswrapper[4813]: I1007 19:57:58.602842 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:57:58 crc kubenswrapper[4813]: E1007 19:57:58.603857 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:58:12 crc kubenswrapper[4813]: I1007 19:58:12.602973 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:58:12 crc kubenswrapper[4813]: E1007 19:58:12.604384 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:58:23 crc kubenswrapper[4813]: I1007 19:58:23.603109 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:58:23 crc kubenswrapper[4813]: E1007 19:58:23.604079 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" 
podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:58:38 crc kubenswrapper[4813]: I1007 19:58:38.604763 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:58:38 crc kubenswrapper[4813]: E1007 19:58:38.607226 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:58:52 crc kubenswrapper[4813]: I1007 19:58:52.605841 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:58:52 crc kubenswrapper[4813]: E1007 19:58:52.606850 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:59:06 crc kubenswrapper[4813]: I1007 19:59:06.602433 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:59:06 crc kubenswrapper[4813]: E1007 19:59:06.603029 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:59:10 crc kubenswrapper[4813]: I1007 19:59:10.816796 4813 generic.go:334] "Generic (PLEG): container finished" podID="a4b24290-359e-4973-bf65-53ca4889870d" containerID="2248a170fb16bd47e88a51a1732ef1542317c97deedfbd15debfff9718745e72" exitCode=0 Oct 07 19:59:10 crc kubenswrapper[4813]: I1007 19:59:10.817580 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" event={"ID":"a4b24290-359e-4973-bf65-53ca4889870d","Type":"ContainerDied","Data":"2248a170fb16bd47e88a51a1732ef1542317c97deedfbd15debfff9718745e72"} Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.232725 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.351726 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-inventory\") pod \"a4b24290-359e-4973-bf65-53ca4889870d\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.351783 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-combined-ca-bundle\") pod \"a4b24290-359e-4973-bf65-53ca4889870d\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.351905 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-secret-0\") pod \"a4b24290-359e-4973-bf65-53ca4889870d\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.352110 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jv9h\" (UniqueName: \"kubernetes.io/projected/a4b24290-359e-4973-bf65-53ca4889870d-kube-api-access-6jv9h\") pod \"a4b24290-359e-4973-bf65-53ca4889870d\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.352191 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-ssh-key\") pod \"a4b24290-359e-4973-bf65-53ca4889870d\" (UID: \"a4b24290-359e-4973-bf65-53ca4889870d\") " Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.360791 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4b24290-359e-4973-bf65-53ca4889870d-kube-api-access-6jv9h" (OuterVolumeSpecName: "kube-api-access-6jv9h") pod "a4b24290-359e-4973-bf65-53ca4889870d" (UID: "a4b24290-359e-4973-bf65-53ca4889870d"). InnerVolumeSpecName "kube-api-access-6jv9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.361424 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "a4b24290-359e-4973-bf65-53ca4889870d" (UID: "a4b24290-359e-4973-bf65-53ca4889870d"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.382060 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "a4b24290-359e-4973-bf65-53ca4889870d" (UID: "a4b24290-359e-4973-bf65-53ca4889870d"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.391084 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a4b24290-359e-4973-bf65-53ca4889870d" (UID: "a4b24290-359e-4973-bf65-53ca4889870d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.404744 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-inventory" (OuterVolumeSpecName: "inventory") pod "a4b24290-359e-4973-bf65-53ca4889870d" (UID: "a4b24290-359e-4973-bf65-53ca4889870d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.454671 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jv9h\" (UniqueName: \"kubernetes.io/projected/a4b24290-359e-4973-bf65-53ca4889870d-kube-api-access-6jv9h\") on node \"crc\" DevicePath \"\"" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.454699 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.454708 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.454717 4813 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.454728 4813 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a4b24290-359e-4973-bf65-53ca4889870d-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.840541 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" event={"ID":"a4b24290-359e-4973-bf65-53ca4889870d","Type":"ContainerDied","Data":"1885bd31ba284f64e5a07991d59ff9c75c5b6fa83b1592588ade60c696da9bc2"} Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.840590 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1885bd31ba284f64e5a07991d59ff9c75c5b6fa83b1592588ade60c696da9bc2" Oct 07 19:59:12 crc kubenswrapper[4813]: I1007 19:59:12.840648 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4dn96" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.009268 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt"] Oct 07 19:59:13 crc kubenswrapper[4813]: E1007 19:59:13.009805 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="extract-content" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.009831 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="extract-content" Oct 07 19:59:13 crc kubenswrapper[4813]: E1007 19:59:13.009852 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="registry-server" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.009864 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="registry-server" Oct 07 19:59:13 crc kubenswrapper[4813]: E1007 19:59:13.009891 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="extract-utilities" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.009902 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="extract-utilities" Oct 07 19:59:13 crc kubenswrapper[4813]: E1007 19:59:13.009933 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerName="registry-server" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.009943 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerName="registry-server" Oct 07 19:59:13 crc kubenswrapper[4813]: E1007 19:59:13.009965 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerName="extract-utilities" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.009975 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerName="extract-utilities" Oct 07 19:59:13 crc kubenswrapper[4813]: E1007 19:59:13.009999 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerName="extract-content" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.010010 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerName="extract-content" Oct 07 19:59:13 crc kubenswrapper[4813]: E1007 19:59:13.010026 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4b24290-359e-4973-bf65-53ca4889870d" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.010037 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4b24290-359e-4973-bf65-53ca4889870d" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.010407 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="289bda1e-7d3f-4f72-af95-d83f7a8f2379" containerName="registry-server" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.010453 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="496e79ac-e377-4874-b3c5-57fe92f9262d" containerName="registry-server" Oct 07 19:59:13 crc 
kubenswrapper[4813]: I1007 19:59:13.010472 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4b24290-359e-4973-bf65-53ca4889870d" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.011383 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.016636 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.016928 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.017128 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.017497 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.017688 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.017872 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.023480 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.027093 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt"] Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167466 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167529 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khb5r\" (UniqueName: \"kubernetes.io/projected/658e18b5-93de-4f7b-962b-fcc403470a2c-kube-api-access-khb5r\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167572 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167665 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: 
\"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167688 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167738 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167765 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167848 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.167888 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.270520 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.270607 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khb5r\" (UniqueName: \"kubernetes.io/projected/658e18b5-93de-4f7b-962b-fcc403470a2c-kube-api-access-khb5r\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.270719 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-1\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.270805 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.270856 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.270979 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.271029 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.271140 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.271235 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.272682 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.277036 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: 
\"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.277591 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.279004 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.281511 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.282355 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.283278 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.298385 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.301280 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khb5r\" (UniqueName: \"kubernetes.io/projected/658e18b5-93de-4f7b-962b-fcc403470a2c-kube-api-access-khb5r\") pod \"nova-edpm-deployment-openstack-edpm-ipam-ltrnt\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.339914 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.946380 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt"] Oct 07 19:59:13 crc kubenswrapper[4813]: I1007 19:59:13.957863 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 19:59:14 crc kubenswrapper[4813]: I1007 19:59:14.870031 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" event={"ID":"658e18b5-93de-4f7b-962b-fcc403470a2c","Type":"ContainerStarted","Data":"5225b9ab842321ea8f23e337fb476060c2f0ede4f180906055c02b61754d998e"} Oct 07 19:59:14 crc kubenswrapper[4813]: I1007 19:59:14.870317 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" event={"ID":"658e18b5-93de-4f7b-962b-fcc403470a2c","Type":"ContainerStarted","Data":"99597c0471069865c6c12d15519fbbccb64bbe113031575660bb28b01a669237"} Oct 07 19:59:14 crc kubenswrapper[4813]: I1007 19:59:14.898578 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" podStartSLOduration=2.450438852 podStartE2EDuration="2.898560603s" podCreationTimestamp="2025-10-07 19:59:12 +0000 UTC" firstStartedPulling="2025-10-07 19:59:13.957542385 +0000 UTC m=+2480.035798016" lastFinishedPulling="2025-10-07 19:59:14.405664146 +0000 UTC m=+2480.483919767" observedRunningTime="2025-10-07 19:59:14.89392508 +0000 UTC m=+2480.972180691" watchObservedRunningTime="2025-10-07 19:59:14.898560603 +0000 UTC m=+2480.976816214" Oct 07 19:59:17 crc kubenswrapper[4813]: I1007 19:59:17.603492 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:59:17 crc kubenswrapper[4813]: E1007 19:59:17.604255 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:59:28 crc kubenswrapper[4813]: I1007 19:59:28.604527 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:59:28 crc kubenswrapper[4813]: E1007 19:59:28.605718 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:59:42 crc kubenswrapper[4813]: I1007 19:59:42.604318 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:59:42 crc kubenswrapper[4813]: E1007 19:59:42.605455 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 19:59:55 crc kubenswrapper[4813]: I1007 19:59:55.603184 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75" Oct 07 19:59:56 crc kubenswrapper[4813]: I1007 19:59:56.328132 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"ada930491aada224a82dc8e8b99f10d8a68a21da97fc7faee9ef3c114a69a7ee"} Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.161643 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx"] Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.164610 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.175581 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.176024 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.195207 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx"] Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.288775 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77205618-7092-4fbd-93c3-e05859125f22-config-volume\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.288908 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb2hv\" (UniqueName: \"kubernetes.io/projected/77205618-7092-4fbd-93c3-e05859125f22-kube-api-access-qb2hv\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.288949 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77205618-7092-4fbd-93c3-e05859125f22-secret-volume\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.392563 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77205618-7092-4fbd-93c3-e05859125f22-config-volume\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.392807 
4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb2hv\" (UniqueName: \"kubernetes.io/projected/77205618-7092-4fbd-93c3-e05859125f22-kube-api-access-qb2hv\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.392882 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77205618-7092-4fbd-93c3-e05859125f22-secret-volume\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.393701 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77205618-7092-4fbd-93c3-e05859125f22-config-volume\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.404235 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77205618-7092-4fbd-93c3-e05859125f22-secret-volume\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.413347 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb2hv\" (UniqueName: \"kubernetes.io/projected/77205618-7092-4fbd-93c3-e05859125f22-kube-api-access-qb2hv\") pod \"collect-profiles-29331120-kfqzx\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:00 crc kubenswrapper[4813]: I1007 20:00:00.506154 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:01 crc kubenswrapper[4813]: I1007 20:00:01.027192 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx"] Oct 07 20:00:01 crc kubenswrapper[4813]: I1007 20:00:01.390864 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" event={"ID":"77205618-7092-4fbd-93c3-e05859125f22","Type":"ContainerStarted","Data":"1b6eab955a883f9b1b253681b9f677749daeee299029aec5642ebfb6cf1dc7ca"} Oct 07 20:00:01 crc kubenswrapper[4813]: I1007 20:00:01.391194 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" event={"ID":"77205618-7092-4fbd-93c3-e05859125f22","Type":"ContainerStarted","Data":"bf9d1ca38b2586fd8e72cf34d757bc9184f4fb9d1a76e1bdc7370bb988159eed"} Oct 07 20:00:01 crc kubenswrapper[4813]: I1007 20:00:01.410078 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" podStartSLOduration=1.410044488 podStartE2EDuration="1.410044488s" podCreationTimestamp="2025-10-07 20:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 20:00:01.405100596 +0000 UTC m=+2527.483356207" watchObservedRunningTime="2025-10-07 20:00:01.410044488 +0000 UTC m=+2527.488300099" Oct 07 20:00:02 crc kubenswrapper[4813]: I1007 20:00:02.400219 4813 generic.go:334] "Generic (PLEG): container finished" podID="77205618-7092-4fbd-93c3-e05859125f22" containerID="1b6eab955a883f9b1b253681b9f677749daeee299029aec5642ebfb6cf1dc7ca" exitCode=0 Oct 07 20:00:02 crc kubenswrapper[4813]: I1007 20:00:02.400516 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" event={"ID":"77205618-7092-4fbd-93c3-e05859125f22","Type":"ContainerDied","Data":"1b6eab955a883f9b1b253681b9f677749daeee299029aec5642ebfb6cf1dc7ca"} Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.718809 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.859548 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb2hv\" (UniqueName: \"kubernetes.io/projected/77205618-7092-4fbd-93c3-e05859125f22-kube-api-access-qb2hv\") pod \"77205618-7092-4fbd-93c3-e05859125f22\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.859594 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77205618-7092-4fbd-93c3-e05859125f22-config-volume\") pod \"77205618-7092-4fbd-93c3-e05859125f22\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.859646 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77205618-7092-4fbd-93c3-e05859125f22-secret-volume\") pod \"77205618-7092-4fbd-93c3-e05859125f22\" (UID: \"77205618-7092-4fbd-93c3-e05859125f22\") " Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.860796 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77205618-7092-4fbd-93c3-e05859125f22-config-volume" (OuterVolumeSpecName: "config-volume") pod "77205618-7092-4fbd-93c3-e05859125f22" (UID: "77205618-7092-4fbd-93c3-e05859125f22"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.865523 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77205618-7092-4fbd-93c3-e05859125f22-kube-api-access-qb2hv" (OuterVolumeSpecName: "kube-api-access-qb2hv") pod "77205618-7092-4fbd-93c3-e05859125f22" (UID: "77205618-7092-4fbd-93c3-e05859125f22"). InnerVolumeSpecName "kube-api-access-qb2hv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.872463 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77205618-7092-4fbd-93c3-e05859125f22-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "77205618-7092-4fbd-93c3-e05859125f22" (UID: "77205618-7092-4fbd-93c3-e05859125f22"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.961285 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb2hv\" (UniqueName: \"kubernetes.io/projected/77205618-7092-4fbd-93c3-e05859125f22-kube-api-access-qb2hv\") on node \"crc\" DevicePath \"\"" Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.961315 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77205618-7092-4fbd-93c3-e05859125f22-config-volume\") on node \"crc\" DevicePath \"\"" Oct 07 20:00:03 crc kubenswrapper[4813]: I1007 20:00:03.961340 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77205618-7092-4fbd-93c3-e05859125f22-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 07 20:00:04 crc kubenswrapper[4813]: I1007 20:00:04.416931 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" event={"ID":"77205618-7092-4fbd-93c3-e05859125f22","Type":"ContainerDied","Data":"bf9d1ca38b2586fd8e72cf34d757bc9184f4fb9d1a76e1bdc7370bb988159eed"} Oct 07 20:00:04 crc kubenswrapper[4813]: I1007 20:00:04.416969 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf9d1ca38b2586fd8e72cf34d757bc9184f4fb9d1a76e1bdc7370bb988159eed" Oct 07 20:00:04 crc kubenswrapper[4813]: I1007 20:00:04.417019 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331120-kfqzx" Oct 07 20:00:04 crc kubenswrapper[4813]: I1007 20:00:04.483023 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"] Oct 07 20:00:04 crc kubenswrapper[4813]: I1007 20:00:04.491787 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331075-s945m"] Oct 07 20:00:04 crc kubenswrapper[4813]: I1007 20:00:04.617855 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99a37f2e-fef1-47f1-ac60-6504a968ebf8" path="/var/lib/kubelet/pods/99a37f2e-fef1-47f1-ac60-6504a968ebf8/volumes" Oct 07 20:00:09 crc kubenswrapper[4813]: I1007 20:00:09.956155 4813 scope.go:117] "RemoveContainer" containerID="2eab444bdfada98cb65c27936ef25746a95b220af4227fde68f542eb8b06e3ca" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.197608 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29331121-7mg5t"] Oct 07 20:01:00 crc kubenswrapper[4813]: E1007 20:01:00.199608 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77205618-7092-4fbd-93c3-e05859125f22" containerName="collect-profiles" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.199834 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="77205618-7092-4fbd-93c3-e05859125f22" containerName="collect-profiles" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.201485 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="77205618-7092-4fbd-93c3-e05859125f22" containerName="collect-profiles" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.202477 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.203117 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29331121-7mg5t"] Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.378392 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-fernet-keys\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.378644 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-config-data\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.378748 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49f4k\" (UniqueName: \"kubernetes.io/projected/f060a35b-a8f6-4392-82bf-9e557928512c-kube-api-access-49f4k\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.378854 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-combined-ca-bundle\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.481424 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-fernet-keys\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.481802 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-config-data\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.481887 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49f4k\" (UniqueName: \"kubernetes.io/projected/f060a35b-a8f6-4392-82bf-9e557928512c-kube-api-access-49f4k\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.481976 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-combined-ca-bundle\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.488810 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-combined-ca-bundle\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.490069 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-fernet-keys\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.501629 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-config-data\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.502675 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49f4k\" (UniqueName: \"kubernetes.io/projected/f060a35b-a8f6-4392-82bf-9e557928512c-kube-api-access-49f4k\") pod \"keystone-cron-29331121-7mg5t\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.536630 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:00 crc kubenswrapper[4813]: I1007 20:01:00.856084 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29331121-7mg5t"] Oct 07 20:01:01 crc kubenswrapper[4813]: I1007 20:01:01.037791 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29331121-7mg5t" event={"ID":"f060a35b-a8f6-4392-82bf-9e557928512c","Type":"ContainerStarted","Data":"d42c9de288dde17c61843c425d8facb87e926f44d62fac35ce0c34b75555bcc2"} Oct 07 20:01:02 crc kubenswrapper[4813]: I1007 20:01:02.053405 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29331121-7mg5t" event={"ID":"f060a35b-a8f6-4392-82bf-9e557928512c","Type":"ContainerStarted","Data":"332b86e605e2aab65b7a76e3f67a976576ec65503822621b86457553b671dd82"} Oct 07 20:01:02 crc kubenswrapper[4813]: I1007 20:01:02.076189 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29331121-7mg5t" podStartSLOduration=2.076172094 podStartE2EDuration="2.076172094s" podCreationTimestamp="2025-10-07 20:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 20:01:02.075804894 +0000 UTC m=+2588.154060535" watchObservedRunningTime="2025-10-07 20:01:02.076172094 +0000 UTC m=+2588.154427705" Oct 07 20:01:05 crc kubenswrapper[4813]: I1007 20:01:05.080501 4813 generic.go:334] "Generic (PLEG): container finished" podID="f060a35b-a8f6-4392-82bf-9e557928512c" containerID="332b86e605e2aab65b7a76e3f67a976576ec65503822621b86457553b671dd82" exitCode=0 Oct 07 20:01:05 crc kubenswrapper[4813]: I1007 20:01:05.080580 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29331121-7mg5t" event={"ID":"f060a35b-a8f6-4392-82bf-9e557928512c","Type":"ContainerDied","Data":"332b86e605e2aab65b7a76e3f67a976576ec65503822621b86457553b671dd82"} Oct 07 20:01:06 crc kubenswrapper[4813]: 
I1007 20:01:06.458538 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.603130 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-config-data\") pod \"f060a35b-a8f6-4392-82bf-9e557928512c\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.603292 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-fernet-keys\") pod \"f060a35b-a8f6-4392-82bf-9e557928512c\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.603684 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49f4k\" (UniqueName: \"kubernetes.io/projected/f060a35b-a8f6-4392-82bf-9e557928512c-kube-api-access-49f4k\") pod \"f060a35b-a8f6-4392-82bf-9e557928512c\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.603747 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-combined-ca-bundle\") pod \"f060a35b-a8f6-4392-82bf-9e557928512c\" (UID: \"f060a35b-a8f6-4392-82bf-9e557928512c\") " Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.610354 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f060a35b-a8f6-4392-82bf-9e557928512c-kube-api-access-49f4k" (OuterVolumeSpecName: "kube-api-access-49f4k") pod "f060a35b-a8f6-4392-82bf-9e557928512c" (UID: "f060a35b-a8f6-4392-82bf-9e557928512c"). InnerVolumeSpecName "kube-api-access-49f4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.618448 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f060a35b-a8f6-4392-82bf-9e557928512c" (UID: "f060a35b-a8f6-4392-82bf-9e557928512c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.663066 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-config-data" (OuterVolumeSpecName: "config-data") pod "f060a35b-a8f6-4392-82bf-9e557928512c" (UID: "f060a35b-a8f6-4392-82bf-9e557928512c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.666638 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f060a35b-a8f6-4392-82bf-9e557928512c" (UID: "f060a35b-a8f6-4392-82bf-9e557928512c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.706934 4813 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.707263 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49f4k\" (UniqueName: \"kubernetes.io/projected/f060a35b-a8f6-4392-82bf-9e557928512c-kube-api-access-49f4k\") on node \"crc\" DevicePath \"\"" Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.707623 4813 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 20:01:06 crc kubenswrapper[4813]: I1007 20:01:06.707823 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f060a35b-a8f6-4392-82bf-9e557928512c-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 20:01:07 crc kubenswrapper[4813]: I1007 20:01:07.119932 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29331121-7mg5t" event={"ID":"f060a35b-a8f6-4392-82bf-9e557928512c","Type":"ContainerDied","Data":"d42c9de288dde17c61843c425d8facb87e926f44d62fac35ce0c34b75555bcc2"} Oct 07 20:01:07 crc kubenswrapper[4813]: I1007 20:01:07.119986 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d42c9de288dde17c61843c425d8facb87e926f44d62fac35ce0c34b75555bcc2" Oct 07 20:01:07 crc kubenswrapper[4813]: I1007 20:01:07.120059 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29331121-7mg5t" Oct 07 20:02:22 crc kubenswrapper[4813]: I1007 20:02:22.078772 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:02:22 crc kubenswrapper[4813]: I1007 20:02:22.079443 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:02:52 crc kubenswrapper[4813]: I1007 20:02:52.078946 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:02:52 crc kubenswrapper[4813]: I1007 20:02:52.080426 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.475953 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zmssg"] Oct 07 20:03:03 crc kubenswrapper[4813]: 
E1007 20:03:03.476896 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f060a35b-a8f6-4392-82bf-9e557928512c" containerName="keystone-cron" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.476913 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f060a35b-a8f6-4392-82bf-9e557928512c" containerName="keystone-cron" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.477155 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f060a35b-a8f6-4392-82bf-9e557928512c" containerName="keystone-cron" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.480280 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.539154 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmssg"] Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.581278 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-utilities\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.581364 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-catalog-content\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.581387 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5kk9\" (UniqueName: \"kubernetes.io/projected/8b986a47-da83-4695-9b7f-8a398f1550fe-kube-api-access-f5kk9\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.683813 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-utilities\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.683915 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-catalog-content\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.683942 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5kk9\" (UniqueName: \"kubernetes.io/projected/8b986a47-da83-4695-9b7f-8a398f1550fe-kube-api-access-f5kk9\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.684250 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-utilities\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.684386 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-catalog-content\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.711524 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5kk9\" (UniqueName: \"kubernetes.io/projected/8b986a47-da83-4695-9b7f-8a398f1550fe-kube-api-access-f5kk9\") pod \"community-operators-zmssg\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") " pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:03 crc kubenswrapper[4813]: I1007 20:03:03.802895 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:04 crc kubenswrapper[4813]: I1007 20:03:04.135293 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmssg"] Oct 07 20:03:04 crc kubenswrapper[4813]: I1007 20:03:04.463406 4813 generic.go:334] "Generic (PLEG): container finished" podID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerID="3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c" exitCode=0 Oct 07 20:03:04 crc kubenswrapper[4813]: I1007 20:03:04.463524 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmssg" event={"ID":"8b986a47-da83-4695-9b7f-8a398f1550fe","Type":"ContainerDied","Data":"3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c"} Oct 07 20:03:04 crc kubenswrapper[4813]: I1007 20:03:04.463731 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmssg" event={"ID":"8b986a47-da83-4695-9b7f-8a398f1550fe","Type":"ContainerStarted","Data":"15274d007dd78fe1eade61314d1af18ee99dec2edca7d36d1ea2f1d5b57866d5"} Oct 07 20:03:05 crc kubenswrapper[4813]: I1007 20:03:05.479700 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmssg" event={"ID":"8b986a47-da83-4695-9b7f-8a398f1550fe","Type":"ContainerStarted","Data":"5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb"} Oct 07 20:03:06 crc kubenswrapper[4813]: I1007 20:03:06.497174 4813 generic.go:334] "Generic (PLEG): container finished" podID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerID="5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb" exitCode=0 Oct 07 20:03:06 crc kubenswrapper[4813]: I1007 20:03:06.497254 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmssg" event={"ID":"8b986a47-da83-4695-9b7f-8a398f1550fe","Type":"ContainerDied","Data":"5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb"} Oct 07 20:03:07 crc kubenswrapper[4813]: I1007 20:03:07.510108 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmssg" event={"ID":"8b986a47-da83-4695-9b7f-8a398f1550fe","Type":"ContainerStarted","Data":"9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290"} Oct 07 20:03:07 crc kubenswrapper[4813]: I1007 
20:03:07.529687 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zmssg" podStartSLOduration=2.093438154 podStartE2EDuration="4.529671439s" podCreationTimestamp="2025-10-07 20:03:03 +0000 UTC" firstStartedPulling="2025-10-07 20:03:04.465539363 +0000 UTC m=+2710.543794964" lastFinishedPulling="2025-10-07 20:03:06.901772638 +0000 UTC m=+2712.980028249" observedRunningTime="2025-10-07 20:03:07.526884789 +0000 UTC m=+2713.605140410" watchObservedRunningTime="2025-10-07 20:03:07.529671439 +0000 UTC m=+2713.607927050" Oct 07 20:03:12 crc kubenswrapper[4813]: I1007 20:03:12.560780 4813 generic.go:334] "Generic (PLEG): container finished" podID="658e18b5-93de-4f7b-962b-fcc403470a2c" containerID="5225b9ab842321ea8f23e337fb476060c2f0ede4f180906055c02b61754d998e" exitCode=0 Oct 07 20:03:12 crc kubenswrapper[4813]: I1007 20:03:12.560861 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" event={"ID":"658e18b5-93de-4f7b-962b-fcc403470a2c","Type":"ContainerDied","Data":"5225b9ab842321ea8f23e337fb476060c2f0ede4f180906055c02b61754d998e"} Oct 07 20:03:13 crc kubenswrapper[4813]: I1007 20:03:13.803340 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:13 crc kubenswrapper[4813]: I1007 20:03:13.803659 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:13 crc kubenswrapper[4813]: I1007 20:03:13.866474 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zmssg" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.021532 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.129978 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-1\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.130083 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-1\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.130179 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-combined-ca-bundle\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.130211 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-inventory\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.130257 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-0\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.130420 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-ssh-key\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.130523 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khb5r\" (UniqueName: \"kubernetes.io/projected/658e18b5-93de-4f7b-962b-fcc403470a2c-kube-api-access-khb5r\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.130561 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-extra-config-0\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.130593 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-0\") pod \"658e18b5-93de-4f7b-962b-fcc403470a2c\" (UID: \"658e18b5-93de-4f7b-962b-fcc403470a2c\") " Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.138202 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/658e18b5-93de-4f7b-962b-fcc403470a2c-kube-api-access-khb5r" (OuterVolumeSpecName: "kube-api-access-khb5r") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "kube-api-access-khb5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.151290 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.173680 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-inventory" (OuterVolumeSpecName: "inventory") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.174774 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.183814 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.187755 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.187966 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.196832 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.199358 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "658e18b5-93de-4f7b-962b-fcc403470a2c" (UID: "658e18b5-93de-4f7b-962b-fcc403470a2c"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233366 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khb5r\" (UniqueName: \"kubernetes.io/projected/658e18b5-93de-4f7b-962b-fcc403470a2c-kube-api-access-khb5r\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233394 4813 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233403 4813 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233411 4813 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233419 4813 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233428 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-inventory\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233438 4813 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233447 4813 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.233457 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/658e18b5-93de-4f7b-962b-fcc403470a2c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.586143 4813 util.go:48] "No ready sandbox for pod can be found. 
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.586181 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-ltrnt" event={"ID":"658e18b5-93de-4f7b-962b-fcc403470a2c","Type":"ContainerDied","Data":"99597c0471069865c6c12d15519fbbccb64bbe113031575660bb28b01a669237"}
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.586835 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99597c0471069865c6c12d15519fbbccb64bbe113031575660bb28b01a669237"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.686061 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zmssg"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.700765 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"]
Oct 07 20:03:14 crc kubenswrapper[4813]: E1007 20:03:14.701197 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="658e18b5-93de-4f7b-962b-fcc403470a2c" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.701216 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="658e18b5-93de-4f7b-962b-fcc403470a2c" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.705595 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="658e18b5-93de-4f7b-962b-fcc403470a2c" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.706231 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.709277 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.709849 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.710050 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.710169 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8sxtv"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.710291 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.725894 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"]
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.779719 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmssg"]
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.848451 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.848597 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8zm2\" (UniqueName: \"kubernetes.io/projected/07dc752d-c126-4085-9367-ca8bcee2c1ec-kube-api-access-m8zm2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.848644 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.848681 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.848808 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.848857 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.848995 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.950475 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.950524 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8zm2\" (UniqueName: \"kubernetes.io/projected/07dc752d-c126-4085-9367-ca8bcee2c1ec-kube-api-access-m8zm2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.950584 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.950618 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.950644 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.950660 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.950719 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.954930 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.955923 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.957780 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.958462 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.961589 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.965038 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:14 crc kubenswrapper[4813]: I1007 20:03:14.976030 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8zm2\" (UniqueName: \"kubernetes.io/projected/07dc752d-c126-4085-9367-ca8bcee2c1ec-kube-api-access-m8zm2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:15 crc kubenswrapper[4813]: I1007 20:03:15.021840 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:03:15 crc kubenswrapper[4813]: I1007 20:03:15.614368 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"]
Oct 07 20:03:16 crc kubenswrapper[4813]: I1007 20:03:16.614543 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zmssg" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerName="registry-server" containerID="cri-o://9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290" gracePeriod=2
Oct 07 20:03:16 crc kubenswrapper[4813]: I1007 20:03:16.615615 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7" event={"ID":"07dc752d-c126-4085-9367-ca8bcee2c1ec","Type":"ContainerStarted","Data":"fe93d7f46e16da7ded95e354d8b348f08562cd83081f81a0c17968e98049b3d5"}
Oct 07 20:03:16 crc kubenswrapper[4813]: I1007 20:03:16.615648 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7" event={"ID":"07dc752d-c126-4085-9367-ca8bcee2c1ec","Type":"ContainerStarted","Data":"f31b3d8acc1062dacbd1b71bf13334b679a0d323c466cde5b6a9660b0fded1b2"}
Oct 07 20:03:16 crc kubenswrapper[4813]: I1007 20:03:16.646795 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7" podStartSLOduration=2.113237565 podStartE2EDuration="2.646769743s" podCreationTimestamp="2025-10-07 20:03:14 +0000 UTC" firstStartedPulling="2025-10-07 20:03:15.634114752 +0000 UTC m=+2721.712370353" lastFinishedPulling="2025-10-07 20:03:16.16764692 +0000 UTC m=+2722.245902531" observedRunningTime="2025-10-07 20:03:16.639813344 +0000 UTC m=+2722.718068955" watchObservedRunningTime="2025-10-07 20:03:16.646769743 +0000 UTC m=+2722.725025354"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.017295 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmssg"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.122521 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-utilities\") pod \"8b986a47-da83-4695-9b7f-8a398f1550fe\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") "
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.122608 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5kk9\" (UniqueName: \"kubernetes.io/projected/8b986a47-da83-4695-9b7f-8a398f1550fe-kube-api-access-f5kk9\") pod \"8b986a47-da83-4695-9b7f-8a398f1550fe\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") "
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.122629 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-catalog-content\") pod \"8b986a47-da83-4695-9b7f-8a398f1550fe\" (UID: \"8b986a47-da83-4695-9b7f-8a398f1550fe\") "
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.123785 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-utilities" (OuterVolumeSpecName: "utilities") pod "8b986a47-da83-4695-9b7f-8a398f1550fe" (UID: "8b986a47-da83-4695-9b7f-8a398f1550fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.132296 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b986a47-da83-4695-9b7f-8a398f1550fe-kube-api-access-f5kk9" (OuterVolumeSpecName: "kube-api-access-f5kk9") pod "8b986a47-da83-4695-9b7f-8a398f1550fe" (UID: "8b986a47-da83-4695-9b7f-8a398f1550fe"). InnerVolumeSpecName "kube-api-access-f5kk9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.173793 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b986a47-da83-4695-9b7f-8a398f1550fe" (UID: "8b986a47-da83-4695-9b7f-8a398f1550fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.224982 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.225010 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5kk9\" (UniqueName: \"kubernetes.io/projected/8b986a47-da83-4695-9b7f-8a398f1550fe-kube-api-access-f5kk9\") on node \"crc\" DevicePath \"\""
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.225020 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b986a47-da83-4695-9b7f-8a398f1550fe-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.624990 4813 generic.go:334] "Generic (PLEG): container finished" podID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerID="9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290" exitCode=0
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.625061 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmssg"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.625077 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmssg" event={"ID":"8b986a47-da83-4695-9b7f-8a398f1550fe","Type":"ContainerDied","Data":"9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290"}
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.625504 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmssg" event={"ID":"8b986a47-da83-4695-9b7f-8a398f1550fe","Type":"ContainerDied","Data":"15274d007dd78fe1eade61314d1af18ee99dec2edca7d36d1ea2f1d5b57866d5"}
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.625562 4813 scope.go:117] "RemoveContainer" containerID="9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.657080 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmssg"]
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.657397 4813 scope.go:117] "RemoveContainer" containerID="5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.664802 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zmssg"]
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.680453 4813 scope.go:117] "RemoveContainer" containerID="3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.727363 4813 scope.go:117] "RemoveContainer" containerID="9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290"
Oct 07 20:03:17 crc kubenswrapper[4813]: E1007 20:03:17.727896 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290\": container with ID starting with 9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290 not found: ID does not exist" containerID="9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.727946 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290"} err="failed to get container status \"9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290\": rpc error: code = NotFound desc = could not find container \"9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290\": container with ID starting with 9e9f08ba73af429740106f0294632f0b295a2434dc9a515570cf1b455f179290 not found: ID does not exist"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.727974 4813 scope.go:117] "RemoveContainer" containerID="5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb"
Oct 07 20:03:17 crc kubenswrapper[4813]: E1007 20:03:17.728367 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb\": container with ID starting with 5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb not found: ID does not exist" containerID="5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.728418 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb"} err="failed to get container status \"5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb\": rpc error: code = NotFound desc = could not find container \"5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb\": container with ID starting with 5e7c1c73e754933311389d8da5c7bf67b3731cd9b5bf725c0cbd942eb121dadb not found: ID does not exist"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.728447 4813 scope.go:117] "RemoveContainer" containerID="3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c"
Oct 07 20:03:17 crc kubenswrapper[4813]: E1007 20:03:17.728733 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c\": container with ID starting with 3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c not found: ID does not exist" containerID="3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c"
Oct 07 20:03:17 crc kubenswrapper[4813]: I1007 20:03:17.728764 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c"} err="failed to get container status \"3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c\": rpc error: code = NotFound desc = could not find container \"3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c\": container with ID starting with 3727ef123f35ffc628bfeec37b449051b6238cbcecf77aa69af17c15de87f17c not found: ID does not exist"
Oct 07 20:03:18 crc kubenswrapper[4813]: I1007 20:03:18.618234 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" path="/var/lib/kubelet/pods/8b986a47-da83-4695-9b7f-8a398f1550fe/volumes"
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.079145 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.079771 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.079835 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf"
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.080760 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ada930491aada224a82dc8e8b99f10d8a68a21da97fc7faee9ef3c114a69a7ee"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.080849 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://ada930491aada224a82dc8e8b99f10d8a68a21da97fc7faee9ef3c114a69a7ee" gracePeriod=600
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.693244 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="ada930491aada224a82dc8e8b99f10d8a68a21da97fc7faee9ef3c114a69a7ee" exitCode=0
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.693539 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"ada930491aada224a82dc8e8b99f10d8a68a21da97fc7faee9ef3c114a69a7ee"}
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.693660 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"}
Oct 07 20:03:22 crc kubenswrapper[4813]: I1007 20:03:22.693678 4813 scope.go:117] "RemoveContainer" containerID="1ef1fd8ad9356a9d6ee79db3166c756ec963ebd1fceba92ec534c03658b95d75"
Oct 07 20:05:05 crc kubenswrapper[4813]: I1007 20:05:05.989970 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g8kkt"]
Oct 07 20:05:05 crc kubenswrapper[4813]: E1007 20:05:05.991994 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerName="extract-content"
Oct 07 20:05:05 crc kubenswrapper[4813]: I1007 20:05:05.992017 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerName="extract-content"
Oct 07 20:05:05 crc kubenswrapper[4813]: E1007 20:05:05.992050 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerName="extract-utilities"
Oct 07 20:05:05 crc kubenswrapper[4813]: I1007 20:05:05.992057 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerName="extract-utilities"
Oct 07 20:05:05 crc kubenswrapper[4813]: E1007 20:05:05.992110 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerName="registry-server"
Oct 07 20:05:05 crc kubenswrapper[4813]: I1007 20:05:05.992119 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerName="registry-server"
Oct 07 20:05:05 crc kubenswrapper[4813]: I1007 20:05:05.992537 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b986a47-da83-4695-9b7f-8a398f1550fe" containerName="registry-server"
Oct 07 20:05:05 crc kubenswrapper[4813]: I1007 20:05:05.995385 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.028826 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g8kkt"]
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.090948 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-utilities\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.091002 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xntnp\" (UniqueName: \"kubernetes.io/projected/ba0a7bec-00b5-4299-b89a-d23386ca6e93-kube-api-access-xntnp\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.091424 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-catalog-content\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.192654 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-catalog-content\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.192728 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-utilities\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.192752 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xntnp\" (UniqueName: \"kubernetes.io/projected/ba0a7bec-00b5-4299-b89a-d23386ca6e93-kube-api-access-xntnp\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.193544 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-utilities\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.193757 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-catalog-content\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.217117 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xntnp\" (UniqueName: \"kubernetes.io/projected/ba0a7bec-00b5-4299-b89a-d23386ca6e93-kube-api-access-xntnp\") pod \"redhat-marketplace-g8kkt\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") " pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.344045 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:06 crc kubenswrapper[4813]: I1007 20:05:06.880462 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g8kkt"]
Oct 07 20:05:07 crc kubenswrapper[4813]: I1007 20:05:07.828478 4813 generic.go:334] "Generic (PLEG): container finished" podID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerID="a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b" exitCode=0
Oct 07 20:05:07 crc kubenswrapper[4813]: I1007 20:05:07.829138 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g8kkt" event={"ID":"ba0a7bec-00b5-4299-b89a-d23386ca6e93","Type":"ContainerDied","Data":"a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b"}
Oct 07 20:05:07 crc kubenswrapper[4813]: I1007 20:05:07.829193 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g8kkt" event={"ID":"ba0a7bec-00b5-4299-b89a-d23386ca6e93","Type":"ContainerStarted","Data":"bb13831c1d2cad9183e05b44c00a7c4a6d24a40d6ab51d8563bb5884e351b4f7"}
Oct 07 20:05:07 crc kubenswrapper[4813]: I1007 20:05:07.831986 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 07 20:05:09 crc kubenswrapper[4813]: I1007 20:05:09.854237 4813 generic.go:334] "Generic (PLEG): container finished" podID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerID="6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525" exitCode=0
Oct 07 20:05:09 crc kubenswrapper[4813]: I1007 20:05:09.854302 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g8kkt" event={"ID":"ba0a7bec-00b5-4299-b89a-d23386ca6e93","Type":"ContainerDied","Data":"6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525"}
Oct 07 20:05:10 crc kubenswrapper[4813]: I1007 20:05:10.866753 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g8kkt" event={"ID":"ba0a7bec-00b5-4299-b89a-d23386ca6e93","Type":"ContainerStarted","Data":"54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb"}
Oct 07 20:05:10 crc kubenswrapper[4813]: I1007 20:05:10.893491 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g8kkt" podStartSLOduration=3.329135705 podStartE2EDuration="5.893465306s" podCreationTimestamp="2025-10-07 20:05:05 +0000 UTC" firstStartedPulling="2025-10-07 20:05:07.831591981 +0000 UTC m=+2833.909847632" lastFinishedPulling="2025-10-07 20:05:10.395921612 +0000 UTC m=+2836.474177233" observedRunningTime="2025-10-07 20:05:10.888203395 +0000 UTC m=+2836.966459006" watchObservedRunningTime="2025-10-07 20:05:10.893465306 +0000 UTC m=+2836.971720937"
Oct 07 20:05:16 crc kubenswrapper[4813]: I1007 20:05:16.344092 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:16 crc kubenswrapper[4813]: I1007 20:05:16.344540 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:16 crc kubenswrapper[4813]: I1007 20:05:16.396483 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:17 crc kubenswrapper[4813]: I1007 20:05:17.003094 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:17 crc kubenswrapper[4813]: I1007 20:05:17.054023 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g8kkt"]
Oct 07 20:05:18 crc kubenswrapper[4813]: I1007 20:05:18.949987 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g8kkt" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerName="registry-server" containerID="cri-o://54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb" gracePeriod=2
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.510430 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.599375 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-catalog-content\") pod \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") "
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.599761 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xntnp\" (UniqueName: \"kubernetes.io/projected/ba0a7bec-00b5-4299-b89a-d23386ca6e93-kube-api-access-xntnp\") pod \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") "
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.599912 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-utilities\") pod \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\" (UID: \"ba0a7bec-00b5-4299-b89a-d23386ca6e93\") "
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.601041 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-utilities" (OuterVolumeSpecName: "utilities") pod "ba0a7bec-00b5-4299-b89a-d23386ca6e93" (UID: "ba0a7bec-00b5-4299-b89a-d23386ca6e93"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.621472 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba0a7bec-00b5-4299-b89a-d23386ca6e93-kube-api-access-xntnp" (OuterVolumeSpecName: "kube-api-access-xntnp") pod "ba0a7bec-00b5-4299-b89a-d23386ca6e93" (UID: "ba0a7bec-00b5-4299-b89a-d23386ca6e93"). InnerVolumeSpecName "kube-api-access-xntnp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.623060 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba0a7bec-00b5-4299-b89a-d23386ca6e93" (UID: "ba0a7bec-00b5-4299-b89a-d23386ca6e93"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.701819 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.701852 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xntnp\" (UniqueName: \"kubernetes.io/projected/ba0a7bec-00b5-4299-b89a-d23386ca6e93-kube-api-access-xntnp\") on node \"crc\" DevicePath \"\""
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.701864 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba0a7bec-00b5-4299-b89a-d23386ca6e93-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.962830 4813 generic.go:334] "Generic (PLEG): container finished" podID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerID="54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb" exitCode=0
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.962892 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g8kkt" event={"ID":"ba0a7bec-00b5-4299-b89a-d23386ca6e93","Type":"ContainerDied","Data":"54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb"}
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.962933 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g8kkt" event={"ID":"ba0a7bec-00b5-4299-b89a-d23386ca6e93","Type":"ContainerDied","Data":"bb13831c1d2cad9183e05b44c00a7c4a6d24a40d6ab51d8563bb5884e351b4f7"}
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.962962 4813 scope.go:117] "RemoveContainer" containerID="54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb"
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.964666 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g8kkt"
Oct 07 20:05:19 crc kubenswrapper[4813]: I1007 20:05:19.997070 4813 scope.go:117] "RemoveContainer" containerID="6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525"
Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.048271 4813 scope.go:117] "RemoveContainer" containerID="a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b"
Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.061130 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g8kkt"]
Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.081006 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g8kkt"]
Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.105555 4813 scope.go:117] "RemoveContainer" containerID="54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb"
Oct 07 20:05:20 crc kubenswrapper[4813]: E1007 20:05:20.106089 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb\": container with ID starting with 54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb not found: ID does not exist" containerID="54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb"
Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.106203 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb"} err="failed to get container status \"54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb\": rpc error: code = NotFound desc = could not find container \"54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb\": container with ID starting with 54d42e286be2797ce813139baea0f3e0363039579abdd5b7b0f535dcca5dbefb not found: ID does not exist"
Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.106303 4813 scope.go:117] "RemoveContainer" containerID="6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525"
Oct 07 20:05:20 crc kubenswrapper[4813]: E1007 20:05:20.106648 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525\": container with ID starting with 6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525 not found: ID does not exist" containerID="6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525"
Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.106773 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525"} err="failed to get container status \"6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525\": rpc error: code = NotFound desc = could not find container \"6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525\": container with ID starting with 6f5d434669024a942a3fac0d899d620d5a0ea17183de9ca45e11aeabe944d525 not found: ID does not exist"
Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.106871 4813 scope.go:117] "RemoveContainer" containerID="a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b"
Oct 07 20:05:20 crc kubenswrapper[4813]: E1007 20:05:20.107235 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b\": container with ID starting with a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b not found: ID does not exist" containerID="a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b"
failed" err="rpc error: code = NotFound desc = could not find container \"a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b\": container with ID starting with a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b not found: ID does not exist" containerID="a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b" Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.107393 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b"} err="failed to get container status \"a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b\": rpc error: code = NotFound desc = could not find container \"a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b\": container with ID starting with a1c83d9787053677643100faf44953b0ad8775edfc96cc45f53a45da6628a51b not found: ID does not exist" Oct 07 20:05:20 crc kubenswrapper[4813]: I1007 20:05:20.621063 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" path="/var/lib/kubelet/pods/ba0a7bec-00b5-4299-b89a-d23386ca6e93/volumes" Oct 07 20:05:22 crc kubenswrapper[4813]: I1007 20:05:22.079634 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:05:22 crc kubenswrapper[4813]: I1007 20:05:22.082526 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:05:52 crc kubenswrapper[4813]: I1007 20:05:52.109157 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:05:52 crc kubenswrapper[4813]: I1007 20:05:52.109840 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.079443 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.080154 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.080206 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.080968 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.081035 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" gracePeriod=600 Oct 07 20:06:22 crc kubenswrapper[4813]: E1007 20:06:22.206699 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.682959 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" exitCode=0 Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.683054 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"} Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.683860 4813 scope.go:117] "RemoveContainer" containerID="ada930491aada224a82dc8e8b99f10d8a68a21da97fc7faee9ef3c114a69a7ee" Oct 07 20:06:22 crc kubenswrapper[4813]: I1007 20:06:22.684968 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:06:22 crc kubenswrapper[4813]: E1007 20:06:22.685582 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:06:36 crc kubenswrapper[4813]: I1007 20:06:36.603028 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:06:36 crc kubenswrapper[4813]: E1007 20:06:36.603619 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:06:50 crc 
Oct 07 20:06:50 crc kubenswrapper[4813]: I1007 20:06:50.603284 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"
Oct 07 20:06:50 crc kubenswrapper[4813]: E1007 20:06:50.604144 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:07:05 crc kubenswrapper[4813]: I1007 20:07:05.603931 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"
Oct 07 20:07:05 crc kubenswrapper[4813]: E1007 20:07:05.605536 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:07:16 crc kubenswrapper[4813]: I1007 20:07:16.300337 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7" event={"ID":"07dc752d-c126-4085-9367-ca8bcee2c1ec","Type":"ContainerDied","Data":"fe93d7f46e16da7ded95e354d8b348f08562cd83081f81a0c17968e98049b3d5"}
Oct 07 20:07:16 crc kubenswrapper[4813]: I1007 20:07:16.300228 4813 generic.go:334] "Generic (PLEG): container finished" podID="07dc752d-c126-4085-9367-ca8bcee2c1ec" containerID="fe93d7f46e16da7ded95e354d8b348f08562cd83081f81a0c17968e98049b3d5" exitCode=0
Oct 07 20:07:16 crc kubenswrapper[4813]: I1007 20:07:16.603739 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"
Oct 07 20:07:16 crc kubenswrapper[4813]: E1007 20:07:16.604197 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.909964 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7"
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.969966 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-inventory\") pod \"07dc752d-c126-4085-9367-ca8bcee2c1ec\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") "
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.970101 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-0\") pod \"07dc752d-c126-4085-9367-ca8bcee2c1ec\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") "
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.970162 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ssh-key\") pod \"07dc752d-c126-4085-9367-ca8bcee2c1ec\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") "
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.970200 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-telemetry-combined-ca-bundle\") pod \"07dc752d-c126-4085-9367-ca8bcee2c1ec\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") "
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.970243 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-1\") pod \"07dc752d-c126-4085-9367-ca8bcee2c1ec\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") "
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.970270 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-2\") pod \"07dc752d-c126-4085-9367-ca8bcee2c1ec\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") "
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.970401 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8zm2\" (UniqueName: \"kubernetes.io/projected/07dc752d-c126-4085-9367-ca8bcee2c1ec-kube-api-access-m8zm2\") pod \"07dc752d-c126-4085-9367-ca8bcee2c1ec\" (UID: \"07dc752d-c126-4085-9367-ca8bcee2c1ec\") "
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.983079 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07dc752d-c126-4085-9367-ca8bcee2c1ec-kube-api-access-m8zm2" (OuterVolumeSpecName: "kube-api-access-m8zm2") pod "07dc752d-c126-4085-9367-ca8bcee2c1ec" (UID: "07dc752d-c126-4085-9367-ca8bcee2c1ec"). InnerVolumeSpecName "kube-api-access-m8zm2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.996383 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "07dc752d-c126-4085-9367-ca8bcee2c1ec" (UID: "07dc752d-c126-4085-9367-ca8bcee2c1ec"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 20:07:17 crc kubenswrapper[4813]: I1007 20:07:17.999733 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "07dc752d-c126-4085-9367-ca8bcee2c1ec" (UID: "07dc752d-c126-4085-9367-ca8bcee2c1ec"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.000431 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-inventory" (OuterVolumeSpecName: "inventory") pod "07dc752d-c126-4085-9367-ca8bcee2c1ec" (UID: "07dc752d-c126-4085-9367-ca8bcee2c1ec"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.001727 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "07dc752d-c126-4085-9367-ca8bcee2c1ec" (UID: "07dc752d-c126-4085-9367-ca8bcee2c1ec"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.026540 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "07dc752d-c126-4085-9367-ca8bcee2c1ec" (UID: "07dc752d-c126-4085-9367-ca8bcee2c1ec"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.028513 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "07dc752d-c126-4085-9367-ca8bcee2c1ec" (UID: "07dc752d-c126-4085-9367-ca8bcee2c1ec"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.072279 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.072441 4813 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.072543 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.072632 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.072723 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8zm2\" (UniqueName: \"kubernetes.io/projected/07dc752d-c126-4085-9367-ca8bcee2c1ec-kube-api-access-m8zm2\") on node \"crc\" DevicePath \"\""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.072811 4813 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-inventory\") on node \"crc\" DevicePath \"\""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.072894 4813 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/07dc752d-c126-4085-9367-ca8bcee2c1ec-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.326226 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7" event={"ID":"07dc752d-c126-4085-9367-ca8bcee2c1ec","Type":"ContainerDied","Data":"f31b3d8acc1062dacbd1b71bf13334b679a0d323c466cde5b6a9660b0fded1b2"}
Oct 07 20:07:18 crc kubenswrapper[4813]: I1007 20:07:18.326262 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f31b3d8acc1062dacbd1b71bf13334b679a0d323c466cde5b6a9660b0fded1b2"
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7" Oct 07 20:07:28 crc kubenswrapper[4813]: I1007 20:07:28.604372 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:07:28 crc kubenswrapper[4813]: E1007 20:07:28.605305 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:07:40 crc kubenswrapper[4813]: I1007 20:07:40.602790 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:07:40 crc kubenswrapper[4813]: E1007 20:07:40.604984 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:07:55 crc kubenswrapper[4813]: I1007 20:07:55.603232 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:07:55 crc kubenswrapper[4813]: E1007 20:07:55.604214 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:08:06 crc kubenswrapper[4813]: I1007 20:08:06.602607 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:08:06 crc kubenswrapper[4813]: E1007 20:08:06.603456 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:08:17 crc kubenswrapper[4813]: I1007 20:08:17.602910 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:08:17 crc kubenswrapper[4813]: E1007 20:08:17.605553 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.112250 4813 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/tempest-tests-tempest"] Oct 07 20:08:21 crc kubenswrapper[4813]: E1007 20:08:21.113077 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerName="extract-utilities" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.113093 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerName="extract-utilities" Oct 07 20:08:21 crc kubenswrapper[4813]: E1007 20:08:21.113109 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerName="extract-content" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.113118 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerName="extract-content" Oct 07 20:08:21 crc kubenswrapper[4813]: E1007 20:08:21.113136 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerName="registry-server" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.113144 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerName="registry-server" Oct 07 20:08:21 crc kubenswrapper[4813]: E1007 20:08:21.113164 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07dc752d-c126-4085-9367-ca8bcee2c1ec" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.113173 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="07dc752d-c126-4085-9367-ca8bcee2c1ec" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.113424 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="07dc752d-c126-4085-9367-ca8bcee2c1ec" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.113439 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba0a7bec-00b5-4299-b89a-d23386ca6e93" containerName="registry-server" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.114149 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.122044 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-p6n6j" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.122168 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.122504 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.140037 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.146189 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.287577 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.287896 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.287930 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.288009 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.288034 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.288134 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzwbm\" (UniqueName: \"kubernetes.io/projected/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-kube-api-access-nzwbm\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.288186 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.288250 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.288273 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-config-data\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389445 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389498 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389539 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389584 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389663 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzwbm\" (UniqueName: \"kubernetes.io/projected/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-kube-api-access-nzwbm\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389719 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389782 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389828 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-config-data\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.389889 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.390567 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.390642 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.391067 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.391349 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-config-data\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.391613 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.401620 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.401741 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ca-certs\") pod \"tempest-tests-tempest\" (UID: 
\"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.407918 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.409783 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzwbm\" (UniqueName: \"kubernetes.io/projected/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-kube-api-access-nzwbm\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.432095 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"tempest-tests-tempest\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.466542 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 20:08:21 crc kubenswrapper[4813]: I1007 20:08:21.950041 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 07 20:08:21 crc kubenswrapper[4813]: W1007 20:08:21.950240 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a9b2bd1_18d3_4b04_bb13_a9e0ecd0c136.slice/crio-230cff4ef4fea6c84def1fdee0bb4eae74f6d6e1a3758a468a5113f904ce0c2b WatchSource:0}: Error finding container 230cff4ef4fea6c84def1fdee0bb4eae74f6d6e1a3758a468a5113f904ce0c2b: Status 404 returned error can't find the container with id 230cff4ef4fea6c84def1fdee0bb4eae74f6d6e1a3758a468a5113f904ce0c2b Oct 07 20:08:22 crc kubenswrapper[4813]: I1007 20:08:22.115255 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136","Type":"ContainerStarted","Data":"230cff4ef4fea6c84def1fdee0bb4eae74f6d6e1a3758a468a5113f904ce0c2b"} Oct 07 20:08:31 crc kubenswrapper[4813]: I1007 20:08:31.603151 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:08:31 crc kubenswrapper[4813]: E1007 20:08:31.603884 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:08:46 crc kubenswrapper[4813]: I1007 20:08:46.603696 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:08:46 crc kubenswrapper[4813]: E1007 20:08:46.605806 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Oct 07 20:08:46 crc kubenswrapper[4813]: E1007 20:08:46.605806 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:08:50 crc kubenswrapper[4813]: E1007 20:08:50.610970 4813 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified"
Oct 07 20:08:50 crc kubenswrapper[4813]: E1007 20:08:50.611790 4813 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nzwbm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 07 20:08:50 crc kubenswrapper[4813]: E1007 20:08:50.613140 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"
Oct 07 20:08:51 crc kubenswrapper[4813]: E1007 20:08:51.457474 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"
Oct 07 20:08:57 crc kubenswrapper[4813]: I1007 20:08:57.603979 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"
Oct 07 20:08:57 crc kubenswrapper[4813]: E1007 20:08:57.605935 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:09:07 crc kubenswrapper[4813]: I1007 20:09:07.084528 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Oct 07 20:09:08 crc kubenswrapper[4813]: I1007 20:09:08.661533 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136","Type":"ContainerStarted","Data":"bc0ad3b3ef13b1b9a0c280b9a982402955af3d850ed84a536ff1d087840fec09"}
Oct 07 20:09:08 crc kubenswrapper[4813]: I1007 20:09:08.688551 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.562089603 podStartE2EDuration="48.68852837s" podCreationTimestamp="2025-10-07 20:08:20 +0000 UTC" firstStartedPulling="2025-10-07 20:08:21.952605678 +0000 UTC m=+3028.030861319" lastFinishedPulling="2025-10-07 20:09:07.079044465 +0000 UTC m=+3073.157300086" observedRunningTime="2025-10-07 20:09:08.684627498 +0000 UTC m=+3074.762883109" watchObservedRunningTime="2025-10-07 20:09:08.68852837 +0000 UTC m=+3074.766783981"
Oct 07 20:09:12 crc kubenswrapper[4813]: I1007 20:09:12.602814 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"
Oct 07 20:09:12 crc kubenswrapper[4813]: E1007 20:09:12.604500 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:09:27 crc kubenswrapper[4813]: E1007 20:09:27.604092 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:09:41 crc kubenswrapper[4813]: I1007 20:09:41.602901 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:09:41 crc kubenswrapper[4813]: E1007 20:09:41.603767 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:09:53 crc kubenswrapper[4813]: I1007 20:09:53.602944 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:09:53 crc kubenswrapper[4813]: E1007 20:09:53.603641 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:10:08 crc kubenswrapper[4813]: I1007 20:10:08.602783 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:10:08 crc kubenswrapper[4813]: E1007 20:10:08.603379 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:10:21 crc kubenswrapper[4813]: I1007 20:10:21.602995 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:10:21 crc kubenswrapper[4813]: E1007 20:10:21.603718 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:10:32 crc kubenswrapper[4813]: I1007 20:10:32.602268 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:10:32 crc kubenswrapper[4813]: E1007 20:10:32.602934 4813 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:10:44 crc kubenswrapper[4813]: I1007 20:10:44.614694 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:10:44 crc kubenswrapper[4813]: E1007 20:10:44.615708 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:10:55 crc kubenswrapper[4813]: I1007 20:10:55.603239 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:10:55 crc kubenswrapper[4813]: E1007 20:10:55.604285 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:11:06 crc kubenswrapper[4813]: I1007 20:11:06.602357 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:11:06 crc kubenswrapper[4813]: E1007 20:11:06.602974 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:11:21 crc kubenswrapper[4813]: I1007 20:11:21.603092 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:11:21 crc kubenswrapper[4813]: E1007 20:11:21.604434 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.120906 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f44th"] Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.131177 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.157940 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b92zv\" (UniqueName: \"kubernetes.io/projected/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-kube-api-access-b92zv\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.158015 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-catalog-content\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.158042 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-utilities\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.174014 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f44th"] Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.261864 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b92zv\" (UniqueName: \"kubernetes.io/projected/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-kube-api-access-b92zv\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.261935 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-catalog-content\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.261958 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-utilities\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.262675 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-utilities\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.262994 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-catalog-content\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.298970 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-b92zv\" (UniqueName: \"kubernetes.io/projected/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-kube-api-access-b92zv\") pod \"redhat-operators-f44th\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.455205 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.705247 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dp5d7"] Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.707681 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.719569 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dp5d7"] Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.773360 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-catalog-content\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.773410 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z5kc\" (UniqueName: \"kubernetes.io/projected/91619c2a-0630-4182-b168-aef965f19c46-kube-api-access-2z5kc\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.773471 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-utilities\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.875171 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-utilities\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.875295 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-catalog-content\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.875345 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z5kc\" (UniqueName: \"kubernetes.io/projected/91619c2a-0630-4182-b168-aef965f19c46-kube-api-access-2z5kc\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.875884 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-utilities\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.875905 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-catalog-content\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:30 crc kubenswrapper[4813]: I1007 20:11:30.895014 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z5kc\" (UniqueName: \"kubernetes.io/projected/91619c2a-0630-4182-b168-aef965f19c46-kube-api-access-2z5kc\") pod \"certified-operators-dp5d7\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") " pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:31 crc kubenswrapper[4813]: I1007 20:11:31.046187 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:31 crc kubenswrapper[4813]: I1007 20:11:31.839392 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f44th"] Oct 07 20:11:31 crc kubenswrapper[4813]: I1007 20:11:31.916156 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dp5d7"] Oct 07 20:11:31 crc kubenswrapper[4813]: I1007 20:11:31.969134 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dp5d7" event={"ID":"91619c2a-0630-4182-b168-aef965f19c46","Type":"ContainerStarted","Data":"be3ded65041aba629b3bfb58e0a10a318ae44e60c7262c4b376b50e3fa32c716"} Oct 07 20:11:31 crc kubenswrapper[4813]: I1007 20:11:31.973116 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f44th" event={"ID":"fc370f73-fb8e-47dc-b0a2-ac48c4c70529","Type":"ContainerStarted","Data":"89dc0e27164562cbfb684b9364550cc0dc91570826997e8145509ef651b74540"} Oct 07 20:11:32 crc kubenswrapper[4813]: I1007 20:11:32.984832 4813 generic.go:334] "Generic (PLEG): container finished" podID="91619c2a-0630-4182-b168-aef965f19c46" containerID="e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea" exitCode=0 Oct 07 20:11:32 crc kubenswrapper[4813]: I1007 20:11:32.984920 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dp5d7" event={"ID":"91619c2a-0630-4182-b168-aef965f19c46","Type":"ContainerDied","Data":"e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea"} Oct 07 20:11:32 crc kubenswrapper[4813]: I1007 20:11:32.987489 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 20:11:32 crc kubenswrapper[4813]: I1007 20:11:32.987751 4813 generic.go:334] "Generic (PLEG): container finished" podID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerID="27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab" exitCode=0 Oct 07 20:11:32 crc kubenswrapper[4813]: I1007 20:11:32.987799 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f44th" event={"ID":"fc370f73-fb8e-47dc-b0a2-ac48c4c70529","Type":"ContainerDied","Data":"27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab"} Oct 07 20:11:33 crc 
Oct 07 20:11:33 crc kubenswrapper[4813]: I1007 20:11:33.602592 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87"
Oct 07 20:11:33 crc kubenswrapper[4813]: I1007 20:11:33.998704 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dp5d7" event={"ID":"91619c2a-0630-4182-b168-aef965f19c46","Type":"ContainerStarted","Data":"2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57"}
Oct 07 20:11:34 crc kubenswrapper[4813]: I1007 20:11:34.002368 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f44th" event={"ID":"fc370f73-fb8e-47dc-b0a2-ac48c4c70529","Type":"ContainerStarted","Data":"e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f"}
Oct 07 20:11:34 crc kubenswrapper[4813]: I1007 20:11:34.005636 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"219803f8ae984cfeee72a9dfecefa72be26401a3ec358d9eeeaba0c71ada998b"}
Oct 07 20:11:36 crc kubenswrapper[4813]: I1007 20:11:36.026179 4813 generic.go:334] "Generic (PLEG): container finished" podID="91619c2a-0630-4182-b168-aef965f19c46" containerID="2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57" exitCode=0
Oct 07 20:11:36 crc kubenswrapper[4813]: I1007 20:11:36.026275 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dp5d7" event={"ID":"91619c2a-0630-4182-b168-aef965f19c46","Type":"ContainerDied","Data":"2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57"}
Oct 07 20:11:37 crc kubenswrapper[4813]: I1007 20:11:37.040584 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dp5d7" event={"ID":"91619c2a-0630-4182-b168-aef965f19c46","Type":"ContainerStarted","Data":"5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6"}
Oct 07 20:11:37 crc kubenswrapper[4813]: I1007 20:11:37.064558 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dp5d7" podStartSLOduration=3.474559965 podStartE2EDuration="7.064535769s" podCreationTimestamp="2025-10-07 20:11:30 +0000 UTC" firstStartedPulling="2025-10-07 20:11:32.98723062 +0000 UTC m=+3219.065486231" lastFinishedPulling="2025-10-07 20:11:36.577206424 +0000 UTC m=+3222.655462035" observedRunningTime="2025-10-07 20:11:37.056918901 +0000 UTC m=+3223.135174502" watchObservedRunningTime="2025-10-07 20:11:37.064535769 +0000 UTC m=+3223.142791380"
Oct 07 20:11:38 crc kubenswrapper[4813]: I1007 20:11:38.052522 4813 generic.go:334] "Generic (PLEG): container finished" podID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerID="e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f" exitCode=0
Oct 07 20:11:38 crc kubenswrapper[4813]: I1007 20:11:38.052610 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f44th" event={"ID":"fc370f73-fb8e-47dc-b0a2-ac48c4c70529","Type":"ContainerDied","Data":"e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f"}
Oct 07 20:11:39 crc kubenswrapper[4813]: I1007 20:11:39.065198 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f44th" event={"ID":"fc370f73-fb8e-47dc-b0a2-ac48c4c70529","Type":"ContainerStarted","Data":"3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065"}
Oct 07 20:11:39 crc kubenswrapper[4813]: I1007 20:11:39.089675 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f44th" podStartSLOduration=3.432449035 podStartE2EDuration="9.08965439s" podCreationTimestamp="2025-10-07 20:11:30 +0000 UTC" firstStartedPulling="2025-10-07 20:11:32.990763361 +0000 UTC m=+3219.069018972" lastFinishedPulling="2025-10-07 20:11:38.647968716 +0000 UTC m=+3224.726224327" observedRunningTime="2025-10-07 20:11:39.082919356 +0000 UTC m=+3225.161174967" watchObservedRunningTime="2025-10-07 20:11:39.08965439 +0000 UTC m=+3225.167910011"
Oct 07 20:11:40 crc kubenswrapper[4813]: I1007 20:11:40.455513 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f44th"
Oct 07 20:11:40 crc kubenswrapper[4813]: I1007 20:11:40.456033 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f44th"
Oct 07 20:11:41 crc kubenswrapper[4813]: I1007 20:11:41.046380 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dp5d7"
Oct 07 20:11:41 crc kubenswrapper[4813]: I1007 20:11:41.046445 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dp5d7"
Oct 07 20:11:41 crc kubenswrapper[4813]: I1007 20:11:41.519632 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-f44th" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="registry-server" probeResult="failure" output=<
Oct 07 20:11:41 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s
Oct 07 20:11:41 crc kubenswrapper[4813]: >
Oct 07 20:11:42 crc kubenswrapper[4813]: I1007 20:11:42.105791 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-dp5d7" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="registry-server" probeResult="failure" output=<
Oct 07 20:11:42 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s
Oct 07 20:11:42 crc kubenswrapper[4813]: >
Oct 07 20:11:51 crc kubenswrapper[4813]: I1007 20:11:51.095312 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dp5d7"
Oct 07 20:11:51 crc kubenswrapper[4813]: I1007 20:11:51.138421 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dp5d7"
Oct 07 20:11:51 crc kubenswrapper[4813]: I1007 20:11:51.507983 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-f44th" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="registry-server" probeResult="failure" output=<
Oct 07 20:11:51 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s
Oct 07 20:11:51 crc kubenswrapper[4813]: >
Oct 07 20:11:54 crc kubenswrapper[4813]: I1007 20:11:54.104911 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dp5d7"]
Oct 07 20:11:54 crc kubenswrapper[4813]: I1007 20:11:54.113977 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dp5d7" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="registry-server" containerID="cri-o://5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6" gracePeriod=2
Oct 07 20:11:54 crc kubenswrapper[4813]: I1007 20:11:54.907579 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dp5d7"
Oct 07 20:11:54 crc kubenswrapper[4813]: I1007 20:11:54.943801 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z5kc\" (UniqueName: \"kubernetes.io/projected/91619c2a-0630-4182-b168-aef965f19c46-kube-api-access-2z5kc\") pod \"91619c2a-0630-4182-b168-aef965f19c46\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") "
Oct 07 20:11:54 crc kubenswrapper[4813]: I1007 20:11:54.944209 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-utilities\") pod \"91619c2a-0630-4182-b168-aef965f19c46\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") "
Oct 07 20:11:54 crc kubenswrapper[4813]: I1007 20:11:54.944278 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-catalog-content\") pod \"91619c2a-0630-4182-b168-aef965f19c46\" (UID: \"91619c2a-0630-4182-b168-aef965f19c46\") "
Oct 07 20:11:54 crc kubenswrapper[4813]: I1007 20:11:54.947694 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-utilities" (OuterVolumeSpecName: "utilities") pod "91619c2a-0630-4182-b168-aef965f19c46" (UID: "91619c2a-0630-4182-b168-aef965f19c46"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:11:54 crc kubenswrapper[4813]: I1007 20:11:54.954589 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91619c2a-0630-4182-b168-aef965f19c46-kube-api-access-2z5kc" (OuterVolumeSpecName: "kube-api-access-2z5kc") pod "91619c2a-0630-4182-b168-aef965f19c46" (UID: "91619c2a-0630-4182-b168-aef965f19c46"). InnerVolumeSpecName "kube-api-access-2z5kc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.008719 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91619c2a-0630-4182-b168-aef965f19c46" (UID: "91619c2a-0630-4182-b168-aef965f19c46"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.046678 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.046713 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91619c2a-0630-4182-b168-aef965f19c46-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.046725 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z5kc\" (UniqueName: \"kubernetes.io/projected/91619c2a-0630-4182-b168-aef965f19c46-kube-api-access-2z5kc\") on node \"crc\" DevicePath \"\"" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.237463 4813 generic.go:334] "Generic (PLEG): container finished" podID="91619c2a-0630-4182-b168-aef965f19c46" containerID="5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6" exitCode=0 Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.237622 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dp5d7" event={"ID":"91619c2a-0630-4182-b168-aef965f19c46","Type":"ContainerDied","Data":"5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6"} Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.237855 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dp5d7" event={"ID":"91619c2a-0630-4182-b168-aef965f19c46","Type":"ContainerDied","Data":"be3ded65041aba629b3bfb58e0a10a318ae44e60c7262c4b376b50e3fa32c716"} Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.237882 4813 scope.go:117] "RemoveContainer" containerID="5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.237720 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dp5d7" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.270659 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dp5d7"] Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.271518 4813 scope.go:117] "RemoveContainer" containerID="2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.280237 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dp5d7"] Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.300180 4813 scope.go:117] "RemoveContainer" containerID="e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.349343 4813 scope.go:117] "RemoveContainer" containerID="5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6" Oct 07 20:11:55 crc kubenswrapper[4813]: E1007 20:11:55.354272 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6\": container with ID starting with 5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6 not found: ID does not exist" containerID="5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.355485 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6"} err="failed to get container status \"5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6\": rpc error: code = NotFound desc = could not find container \"5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6\": container with ID starting with 5e2f67b74eea9652271be0353aef3593f1ef7ad581727faf4771e095bae99cb6 not found: ID does not exist" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.355520 4813 scope.go:117] "RemoveContainer" containerID="2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57" Oct 07 20:11:55 crc kubenswrapper[4813]: E1007 20:11:55.355900 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57\": container with ID starting with 2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57 not found: ID does not exist" containerID="2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.355945 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57"} err="failed to get container status \"2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57\": rpc error: code = NotFound desc = could not find container \"2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57\": container with ID starting with 2e9238b960ce1baf3dfedc28c71db63ee666a01fda3c8bc342d68969a9c21c57 not found: ID does not exist" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.355973 4813 scope.go:117] "RemoveContainer" containerID="e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea" Oct 07 20:11:55 crc kubenswrapper[4813]: E1007 20:11:55.356506 4813 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea\": container with ID starting with e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea not found: ID does not exist" containerID="e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea" Oct 07 20:11:55 crc kubenswrapper[4813]: I1007 20:11:55.356532 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea"} err="failed to get container status \"e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea\": rpc error: code = NotFound desc = could not find container \"e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea\": container with ID starting with e2a5a8e52bb947cf703a3f75c49c5b47c67d792dead06f530e3a6c59ecf158ea not found: ID does not exist" Oct 07 20:11:56 crc kubenswrapper[4813]: I1007 20:11:56.619097 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91619c2a-0630-4182-b168-aef965f19c46" path="/var/lib/kubelet/pods/91619c2a-0630-4182-b168-aef965f19c46/volumes" Oct 07 20:12:00 crc kubenswrapper[4813]: I1007 20:12:00.540102 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:12:00 crc kubenswrapper[4813]: I1007 20:12:00.614005 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:12:01 crc kubenswrapper[4813]: I1007 20:12:01.914775 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f44th"] Oct 07 20:12:02 crc kubenswrapper[4813]: I1007 20:12:02.314354 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f44th" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="registry-server" containerID="cri-o://3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065" gracePeriod=2 Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:02.998654 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.122034 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-utilities\") pod \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.122100 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-catalog-content\") pod \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.122210 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b92zv\" (UniqueName: \"kubernetes.io/projected/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-kube-api-access-b92zv\") pod \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\" (UID: \"fc370f73-fb8e-47dc-b0a2-ac48c4c70529\") " Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.123539 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-utilities" (OuterVolumeSpecName: "utilities") pod "fc370f73-fb8e-47dc-b0a2-ac48c4c70529" (UID: "fc370f73-fb8e-47dc-b0a2-ac48c4c70529"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.130447 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-kube-api-access-b92zv" (OuterVolumeSpecName: "kube-api-access-b92zv") pod "fc370f73-fb8e-47dc-b0a2-ac48c4c70529" (UID: "fc370f73-fb8e-47dc-b0a2-ac48c4c70529"). InnerVolumeSpecName "kube-api-access-b92zv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.225635 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b92zv\" (UniqueName: \"kubernetes.io/projected/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-kube-api-access-b92zv\") on node \"crc\" DevicePath \"\"" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.225663 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.226068 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fc370f73-fb8e-47dc-b0a2-ac48c4c70529" (UID: "fc370f73-fb8e-47dc-b0a2-ac48c4c70529"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.322937 4813 generic.go:334] "Generic (PLEG): container finished" podID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerID="3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065" exitCode=0 Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.322970 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f44th" event={"ID":"fc370f73-fb8e-47dc-b0a2-ac48c4c70529","Type":"ContainerDied","Data":"3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065"} Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.322995 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f44th" event={"ID":"fc370f73-fb8e-47dc-b0a2-ac48c4c70529","Type":"ContainerDied","Data":"89dc0e27164562cbfb684b9364550cc0dc91570826997e8145509ef651b74540"} Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.323011 4813 scope.go:117] "RemoveContainer" containerID="3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.323127 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f44th" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.327205 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc370f73-fb8e-47dc-b0a2-ac48c4c70529-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.356127 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f44th"] Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.358138 4813 scope.go:117] "RemoveContainer" containerID="e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.384697 4813 scope.go:117] "RemoveContainer" containerID="27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.387791 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f44th"] Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.425804 4813 scope.go:117] "RemoveContainer" containerID="3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065" Oct 07 20:12:03 crc kubenswrapper[4813]: E1007 20:12:03.426269 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065\": container with ID starting with 3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065 not found: ID does not exist" containerID="3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.426315 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065"} err="failed to get container status \"3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065\": rpc error: code = NotFound desc = could not find container \"3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065\": container with ID starting with 3a7482e39e2bf6b78f26c6be59af4b758bac2db59fd471d9cb9d2fb1dec41065 not found: ID does not exist" Oct 07 20:12:03 crc 
kubenswrapper[4813]: I1007 20:12:03.426353 4813 scope.go:117] "RemoveContainer" containerID="e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f" Oct 07 20:12:03 crc kubenswrapper[4813]: E1007 20:12:03.426768 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f\": container with ID starting with e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f not found: ID does not exist" containerID="e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.426798 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f"} err="failed to get container status \"e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f\": rpc error: code = NotFound desc = could not find container \"e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f\": container with ID starting with e81db65fa62807bf2b96fb92a41ef22a33e939d83bb205f5626a5aee41b5f27f not found: ID does not exist" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.426820 4813 scope.go:117] "RemoveContainer" containerID="27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab" Oct 07 20:12:03 crc kubenswrapper[4813]: E1007 20:12:03.427214 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab\": container with ID starting with 27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab not found: ID does not exist" containerID="27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab" Oct 07 20:12:03 crc kubenswrapper[4813]: I1007 20:12:03.427253 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab"} err="failed to get container status \"27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab\": rpc error: code = NotFound desc = could not find container \"27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab\": container with ID starting with 27405add19609b16c25559909c1d96a30b0ddf51232bb5e39f05c45a10299aab not found: ID does not exist" Oct 07 20:12:04 crc kubenswrapper[4813]: I1007 20:12:04.617217 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" path="/var/lib/kubelet/pods/fc370f73-fb8e-47dc-b0a2-ac48c4c70529/volumes" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.738136 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5nhdd"] Oct 07 20:13:03 crc kubenswrapper[4813]: E1007 20:13:03.739494 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="registry-server" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.739523 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="registry-server" Oct 07 20:13:03 crc kubenswrapper[4813]: E1007 20:13:03.739552 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="extract-content" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.739565 4813 
state_mem.go:107] "Deleted CPUSet assignment" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="extract-content" Oct 07 20:13:03 crc kubenswrapper[4813]: E1007 20:13:03.739590 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="extract-utilities" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.739603 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="extract-utilities" Oct 07 20:13:03 crc kubenswrapper[4813]: E1007 20:13:03.739624 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="extract-content" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.739636 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="extract-content" Oct 07 20:13:03 crc kubenswrapper[4813]: E1007 20:13:03.739665 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="registry-server" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.739677 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="registry-server" Oct 07 20:13:03 crc kubenswrapper[4813]: E1007 20:13:03.739708 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="extract-utilities" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.739720 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="extract-utilities" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.740065 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="91619c2a-0630-4182-b168-aef965f19c46" containerName="registry-server" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.740094 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc370f73-fb8e-47dc-b0a2-ac48c4c70529" containerName="registry-server" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.743212 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.748446 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5nhdd"] Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.888953 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-utilities\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.889054 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4crl\" (UniqueName: \"kubernetes.io/projected/b793db72-3d24-48a5-b31d-e2c98f60a123-kube-api-access-t4crl\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.889074 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-catalog-content\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.991051 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-utilities\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.991151 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4crl\" (UniqueName: \"kubernetes.io/projected/b793db72-3d24-48a5-b31d-e2c98f60a123-kube-api-access-t4crl\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.991172 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-catalog-content\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.991853 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-catalog-content\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:03 crc kubenswrapper[4813]: I1007 20:13:03.991881 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-utilities\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:04 crc kubenswrapper[4813]: I1007 20:13:04.013545 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t4crl\" (UniqueName: \"kubernetes.io/projected/b793db72-3d24-48a5-b31d-e2c98f60a123-kube-api-access-t4crl\") pod \"community-operators-5nhdd\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:04 crc kubenswrapper[4813]: I1007 20:13:04.109064 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:04 crc kubenswrapper[4813]: I1007 20:13:04.655149 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5nhdd"] Oct 07 20:13:04 crc kubenswrapper[4813]: I1007 20:13:04.905185 4813 generic.go:334] "Generic (PLEG): container finished" podID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerID="e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7" exitCode=0 Oct 07 20:13:04 crc kubenswrapper[4813]: I1007 20:13:04.905238 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5nhdd" event={"ID":"b793db72-3d24-48a5-b31d-e2c98f60a123","Type":"ContainerDied","Data":"e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7"} Oct 07 20:13:04 crc kubenswrapper[4813]: I1007 20:13:04.905288 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5nhdd" event={"ID":"b793db72-3d24-48a5-b31d-e2c98f60a123","Type":"ContainerStarted","Data":"e785a6b3d66a6aefce5502929c672fc8fc4ed444fca441e63d3bc1fab26bbb02"} Oct 07 20:13:05 crc kubenswrapper[4813]: I1007 20:13:05.915377 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5nhdd" event={"ID":"b793db72-3d24-48a5-b31d-e2c98f60a123","Type":"ContainerStarted","Data":"5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef"} Oct 07 20:13:06 crc kubenswrapper[4813]: I1007 20:13:06.926186 4813 generic.go:334] "Generic (PLEG): container finished" podID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerID="5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef" exitCode=0 Oct 07 20:13:06 crc kubenswrapper[4813]: I1007 20:13:06.926242 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5nhdd" event={"ID":"b793db72-3d24-48a5-b31d-e2c98f60a123","Type":"ContainerDied","Data":"5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef"} Oct 07 20:13:07 crc kubenswrapper[4813]: I1007 20:13:07.942721 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5nhdd" event={"ID":"b793db72-3d24-48a5-b31d-e2c98f60a123","Type":"ContainerStarted","Data":"d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3"} Oct 07 20:13:07 crc kubenswrapper[4813]: I1007 20:13:07.966358 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5nhdd" podStartSLOduration=2.297385485 podStartE2EDuration="4.966310096s" podCreationTimestamp="2025-10-07 20:13:03 +0000 UTC" firstStartedPulling="2025-10-07 20:13:04.906858601 +0000 UTC m=+3310.985114212" lastFinishedPulling="2025-10-07 20:13:07.575783202 +0000 UTC m=+3313.654038823" observedRunningTime="2025-10-07 20:13:07.958230957 +0000 UTC m=+3314.036486568" watchObservedRunningTime="2025-10-07 20:13:07.966310096 +0000 UTC m=+3314.044565727" Oct 07 20:13:14 crc kubenswrapper[4813]: I1007 20:13:14.113660 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:14 crc kubenswrapper[4813]: I1007 20:13:14.114405 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:14 crc kubenswrapper[4813]: I1007 20:13:14.212313 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:15 crc kubenswrapper[4813]: I1007 20:13:15.088002 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:15 crc kubenswrapper[4813]: I1007 20:13:15.162680 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5nhdd"] Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.046108 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5nhdd" podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerName="registry-server" containerID="cri-o://d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3" gracePeriod=2 Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.580960 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.669994 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-catalog-content\") pod \"b793db72-3d24-48a5-b31d-e2c98f60a123\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.670358 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-utilities\") pod \"b793db72-3d24-48a5-b31d-e2c98f60a123\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.670550 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4crl\" (UniqueName: \"kubernetes.io/projected/b793db72-3d24-48a5-b31d-e2c98f60a123-kube-api-access-t4crl\") pod \"b793db72-3d24-48a5-b31d-e2c98f60a123\" (UID: \"b793db72-3d24-48a5-b31d-e2c98f60a123\") " Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.670910 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-utilities" (OuterVolumeSpecName: "utilities") pod "b793db72-3d24-48a5-b31d-e2c98f60a123" (UID: "b793db72-3d24-48a5-b31d-e2c98f60a123"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.672114 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.676526 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b793db72-3d24-48a5-b31d-e2c98f60a123-kube-api-access-t4crl" (OuterVolumeSpecName: "kube-api-access-t4crl") pod "b793db72-3d24-48a5-b31d-e2c98f60a123" (UID: "b793db72-3d24-48a5-b31d-e2c98f60a123"). InnerVolumeSpecName "kube-api-access-t4crl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.717176 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b793db72-3d24-48a5-b31d-e2c98f60a123" (UID: "b793db72-3d24-48a5-b31d-e2c98f60a123"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.773357 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4crl\" (UniqueName: \"kubernetes.io/projected/b793db72-3d24-48a5-b31d-e2c98f60a123-kube-api-access-t4crl\") on node \"crc\" DevicePath \"\"" Oct 07 20:13:17 crc kubenswrapper[4813]: I1007 20:13:17.773389 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b793db72-3d24-48a5-b31d-e2c98f60a123-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.055995 4813 generic.go:334] "Generic (PLEG): container finished" podID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerID="d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3" exitCode=0 Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.056042 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5nhdd" event={"ID":"b793db72-3d24-48a5-b31d-e2c98f60a123","Type":"ContainerDied","Data":"d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3"} Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.056071 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5nhdd" event={"ID":"b793db72-3d24-48a5-b31d-e2c98f60a123","Type":"ContainerDied","Data":"e785a6b3d66a6aefce5502929c672fc8fc4ed444fca441e63d3bc1fab26bbb02"} Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.056092 4813 scope.go:117] "RemoveContainer" containerID="d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.056212 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5nhdd" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.093061 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5nhdd"] Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.095524 4813 scope.go:117] "RemoveContainer" containerID="5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.103249 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5nhdd"] Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.142630 4813 scope.go:117] "RemoveContainer" containerID="e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.168125 4813 scope.go:117] "RemoveContainer" containerID="d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3" Oct 07 20:13:18 crc kubenswrapper[4813]: E1007 20:13:18.168631 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3\": container with ID starting with d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3 not found: ID does not exist" containerID="d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.168672 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3"} err="failed to get container status \"d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3\": rpc error: code = NotFound desc = could not find container \"d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3\": container with ID starting with d2deb3986e52d40d633081c832dd90a95391c97023f8e24c534da6df815f03f3 not found: ID does not exist" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.168700 4813 scope.go:117] "RemoveContainer" containerID="5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef" Oct 07 20:13:18 crc kubenswrapper[4813]: E1007 20:13:18.169022 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef\": container with ID starting with 5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef not found: ID does not exist" containerID="5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.169052 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef"} err="failed to get container status \"5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef\": rpc error: code = NotFound desc = could not find container \"5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef\": container with ID starting with 5ca79ea2e92adf927ccbb61ad21ce9e5dcfd4a79132648ccd128dbb56452deef not found: ID does not exist" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.169071 4813 scope.go:117] "RemoveContainer" containerID="e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7" Oct 07 20:13:18 crc kubenswrapper[4813]: E1007 20:13:18.169554 4813 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7\": container with ID starting with e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7 not found: ID does not exist" containerID="e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.169581 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7"} err="failed to get container status \"e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7\": rpc error: code = NotFound desc = could not find container \"e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7\": container with ID starting with e008617f8a5bf2a3a60edadb173acdd8d8b40ef647e125cf034bb1f776068fb7 not found: ID does not exist" Oct 07 20:13:18 crc kubenswrapper[4813]: I1007 20:13:18.613456 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" path="/var/lib/kubelet/pods/b793db72-3d24-48a5-b31d-e2c98f60a123/volumes" Oct 07 20:13:52 crc kubenswrapper[4813]: I1007 20:13:52.079069 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:13:52 crc kubenswrapper[4813]: I1007 20:13:52.080655 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:14:22 crc kubenswrapper[4813]: I1007 20:14:22.078513 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:14:22 crc kubenswrapper[4813]: I1007 20:14:22.079054 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.078834 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.079351 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.079399 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.079871 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"219803f8ae984cfeee72a9dfecefa72be26401a3ec358d9eeeaba0c71ada998b"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.079921 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://219803f8ae984cfeee72a9dfecefa72be26401a3ec358d9eeeaba0c71ada998b" gracePeriod=600 Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.994055 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="219803f8ae984cfeee72a9dfecefa72be26401a3ec358d9eeeaba0c71ada998b" exitCode=0 Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.994142 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"219803f8ae984cfeee72a9dfecefa72be26401a3ec358d9eeeaba0c71ada998b"} Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.994771 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef"} Oct 07 20:14:52 crc kubenswrapper[4813]: I1007 20:14:52.994809 4813 scope.go:117] "RemoveContainer" containerID="dc73b7d3cdff8f554ddcc88f5fba3d43484b2a90b3d0b7c30c72f738418a6f87" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.207883 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"] Oct 07 20:15:00 crc kubenswrapper[4813]: E1007 20:15:00.209445 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerName="registry-server" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.209523 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerName="registry-server" Oct 07 20:15:00 crc kubenswrapper[4813]: E1007 20:15:00.209596 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerName="extract-content" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.209641 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerName="extract-content" Oct 07 20:15:00 crc kubenswrapper[4813]: E1007 20:15:00.209718 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerName="extract-utilities" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.209770 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerName="extract-utilities" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.209994 4813 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b793db72-3d24-48a5-b31d-e2c98f60a123" containerName="registry-server" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.210700 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.220338 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"] Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.221007 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.221016 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.341570 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5wcr\" (UniqueName: \"kubernetes.io/projected/48d41f38-3b1a-4041-ada9-9f16769e6508-kube-api-access-b5wcr\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.341934 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/48d41f38-3b1a-4041-ada9-9f16769e6508-secret-volume\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.342237 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/48d41f38-3b1a-4041-ada9-9f16769e6508-config-volume\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.443278 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/48d41f38-3b1a-4041-ada9-9f16769e6508-config-volume\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.443430 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5wcr\" (UniqueName: \"kubernetes.io/projected/48d41f38-3b1a-4041-ada9-9f16769e6508-kube-api-access-b5wcr\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.443534 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/48d41f38-3b1a-4041-ada9-9f16769e6508-secret-volume\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.444396 
4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/48d41f38-3b1a-4041-ada9-9f16769e6508-config-volume\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"
Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.452002 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/48d41f38-3b1a-4041-ada9-9f16769e6508-secret-volume\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"
Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.462898 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5wcr\" (UniqueName: \"kubernetes.io/projected/48d41f38-3b1a-4041-ada9-9f16769e6508-kube-api-access-b5wcr\") pod \"collect-profiles-29331135-6t9hk\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"
Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.530542 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"
Oct 07 20:15:00 crc kubenswrapper[4813]: I1007 20:15:00.991724 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"]
Oct 07 20:15:01 crc kubenswrapper[4813]: W1007 20:15:01.049228 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48d41f38_3b1a_4041_ada9_9f16769e6508.slice/crio-8270c69a82c110b1920a8eda274226c8f0371f6d1f0baecf03b0471e7f83d14d WatchSource:0}: Error finding container 8270c69a82c110b1920a8eda274226c8f0371f6d1f0baecf03b0471e7f83d14d: Status 404 returned error can't find the container with id 8270c69a82c110b1920a8eda274226c8f0371f6d1f0baecf03b0471e7f83d14d
Oct 07 20:15:01 crc kubenswrapper[4813]: I1007 20:15:01.108434 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" event={"ID":"48d41f38-3b1a-4041-ada9-9f16769e6508","Type":"ContainerStarted","Data":"8270c69a82c110b1920a8eda274226c8f0371f6d1f0baecf03b0471e7f83d14d"}
Oct 07 20:15:02 crc kubenswrapper[4813]: I1007 20:15:02.116967 4813 generic.go:334] "Generic (PLEG): container finished" podID="48d41f38-3b1a-4041-ada9-9f16769e6508" containerID="a318c32b4621f69601b59c4993eff6b27de8b0d4a3c91142e1bccd651cb5ff78" exitCode=0
Oct 07 20:15:02 crc kubenswrapper[4813]: I1007 20:15:02.117048 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" event={"ID":"48d41f38-3b1a-4041-ada9-9f16769e6508","Type":"ContainerDied","Data":"a318c32b4621f69601b59c4993eff6b27de8b0d4a3c91142e1bccd651cb5ff78"}
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.687585 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.810296 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/48d41f38-3b1a-4041-ada9-9f16769e6508-secret-volume\") pod \"48d41f38-3b1a-4041-ada9-9f16769e6508\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") "
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.810388 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/48d41f38-3b1a-4041-ada9-9f16769e6508-config-volume\") pod \"48d41f38-3b1a-4041-ada9-9f16769e6508\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") "
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.810486 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5wcr\" (UniqueName: \"kubernetes.io/projected/48d41f38-3b1a-4041-ada9-9f16769e6508-kube-api-access-b5wcr\") pod \"48d41f38-3b1a-4041-ada9-9f16769e6508\" (UID: \"48d41f38-3b1a-4041-ada9-9f16769e6508\") "
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.811565 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48d41f38-3b1a-4041-ada9-9f16769e6508-config-volume" (OuterVolumeSpecName: "config-volume") pod "48d41f38-3b1a-4041-ada9-9f16769e6508" (UID: "48d41f38-3b1a-4041-ada9-9f16769e6508"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.822048 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48d41f38-3b1a-4041-ada9-9f16769e6508-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "48d41f38-3b1a-4041-ada9-9f16769e6508" (UID: "48d41f38-3b1a-4041-ada9-9f16769e6508"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.829395 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48d41f38-3b1a-4041-ada9-9f16769e6508-kube-api-access-b5wcr" (OuterVolumeSpecName: "kube-api-access-b5wcr") pod "48d41f38-3b1a-4041-ada9-9f16769e6508" (UID: "48d41f38-3b1a-4041-ada9-9f16769e6508"). InnerVolumeSpecName "kube-api-access-b5wcr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.912745 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5wcr\" (UniqueName: \"kubernetes.io/projected/48d41f38-3b1a-4041-ada9-9f16769e6508-kube-api-access-b5wcr\") on node \"crc\" DevicePath \"\""
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.913012 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/48d41f38-3b1a-4041-ada9-9f16769e6508-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 07 20:15:03 crc kubenswrapper[4813]: I1007 20:15:03.913022 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/48d41f38-3b1a-4041-ada9-9f16769e6508-config-volume\") on node \"crc\" DevicePath \"\""
Oct 07 20:15:04 crc kubenswrapper[4813]: I1007 20:15:04.137753 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk" event={"ID":"48d41f38-3b1a-4041-ada9-9f16769e6508","Type":"ContainerDied","Data":"8270c69a82c110b1920a8eda274226c8f0371f6d1f0baecf03b0471e7f83d14d"}
Oct 07 20:15:04 crc kubenswrapper[4813]: I1007 20:15:04.137807 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331135-6t9hk"
Oct 07 20:15:04 crc kubenswrapper[4813]: I1007 20:15:04.137810 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8270c69a82c110b1920a8eda274226c8f0371f6d1f0baecf03b0471e7f83d14d"
Oct 07 20:15:04 crc kubenswrapper[4813]: I1007 20:15:04.772221 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc"]
Oct 07 20:15:04 crc kubenswrapper[4813]: I1007 20:15:04.781605 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331090-lfhdc"]
Oct 07 20:15:06 crc kubenswrapper[4813]: I1007 20:15:06.615432 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2217c78c-2018-4f7d-99eb-158ca2077830" path="/var/lib/kubelet/pods/2217c78c-2018-4f7d-99eb-158ca2077830/volumes"
Oct 07 20:15:10 crc kubenswrapper[4813]: I1007 20:15:10.545170 4813 scope.go:117] "RemoveContainer" containerID="01460ba679d0c86e7a748ebe4c95fe1dc3064d488072901947b05d4e54dd1027"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.084584 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4kgll"]
Oct 07 20:15:52 crc kubenswrapper[4813]: E1007 20:15:52.085807 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48d41f38-3b1a-4041-ada9-9f16769e6508" containerName="collect-profiles"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.085828 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="48d41f38-3b1a-4041-ada9-9f16769e6508" containerName="collect-profiles"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.086136 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="48d41f38-3b1a-4041-ada9-9f16769e6508" containerName="collect-profiles"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.088275 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.100233 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4kgll"]
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.232616 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrvdq\" (UniqueName: \"kubernetes.io/projected/b232485b-3f02-46aa-b939-3da7ae757414-kube-api-access-zrvdq\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.232698 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-utilities\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.233206 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-catalog-content\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.335188 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-catalog-content\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.335275 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrvdq\" (UniqueName: \"kubernetes.io/projected/b232485b-3f02-46aa-b939-3da7ae757414-kube-api-access-zrvdq\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.335353 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-utilities\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.335836 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-catalog-content\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.335856 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-utilities\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.355393 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrvdq\" (UniqueName: \"kubernetes.io/projected/b232485b-3f02-46aa-b939-3da7ae757414-kube-api-access-zrvdq\") pod \"redhat-marketplace-4kgll\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.412982 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:15:52 crc kubenswrapper[4813]: I1007 20:15:52.878126 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4kgll"]
Oct 07 20:15:52 crc kubenswrapper[4813]: W1007 20:15:52.887035 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb232485b_3f02_46aa_b939_3da7ae757414.slice/crio-866dc63e63e2bce7c99605839543c7a6ddf5cdc9d34feddde8d9617a1bd69d4a WatchSource:0}: Error finding container 866dc63e63e2bce7c99605839543c7a6ddf5cdc9d34feddde8d9617a1bd69d4a: Status 404 returned error can't find the container with id 866dc63e63e2bce7c99605839543c7a6ddf5cdc9d34feddde8d9617a1bd69d4a
Oct 07 20:15:53 crc kubenswrapper[4813]: I1007 20:15:53.617566 4813 generic.go:334] "Generic (PLEG): container finished" podID="b232485b-3f02-46aa-b939-3da7ae757414" containerID="cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820" exitCode=0
Oct 07 20:15:53 crc kubenswrapper[4813]: I1007 20:15:53.618196 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4kgll" event={"ID":"b232485b-3f02-46aa-b939-3da7ae757414","Type":"ContainerDied","Data":"cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820"}
Oct 07 20:15:53 crc kubenswrapper[4813]: I1007 20:15:53.618235 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4kgll" event={"ID":"b232485b-3f02-46aa-b939-3da7ae757414","Type":"ContainerStarted","Data":"866dc63e63e2bce7c99605839543c7a6ddf5cdc9d34feddde8d9617a1bd69d4a"}
Oct 07 20:15:55 crc kubenswrapper[4813]: I1007 20:15:55.638434 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4kgll" event={"ID":"b232485b-3f02-46aa-b939-3da7ae757414","Type":"ContainerDied","Data":"da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5"}
Oct 07 20:15:55 crc kubenswrapper[4813]: I1007 20:15:55.638419 4813 generic.go:334] "Generic (PLEG): container finished" podID="b232485b-3f02-46aa-b939-3da7ae757414" containerID="da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5" exitCode=0
Oct 07 20:15:56 crc kubenswrapper[4813]: I1007 20:15:56.649649 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4kgll" event={"ID":"b232485b-3f02-46aa-b939-3da7ae757414","Type":"ContainerStarted","Data":"e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2"}
Oct 07 20:16:02 crc kubenswrapper[4813]: I1007 20:16:02.414777 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:16:02 crc kubenswrapper[4813]: I1007 20:16:02.415549 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:16:02 crc kubenswrapper[4813]: I1007 20:16:02.473095 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4kgll"
Oct 07 20:16:02 crc kubenswrapper[4813]: I1007 20:16:02.499462 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4kgll" podStartSLOduration=8.103755421 podStartE2EDuration="10.499438194s" podCreationTimestamp="2025-10-07 20:15:52 +0000 UTC" firstStartedPulling="2025-10-07 20:15:53.621134116 +0000 UTC m=+3479.699389727" lastFinishedPulling="2025-10-07 20:15:56.016816859 +0000 UTC m=+3482.095072500" observedRunningTime="2025-10-07 20:15:56.674950071 +0000 UTC m=+3482.753205682" watchObservedRunningTime="2025-10-07 20:16:02.499438194 +0000 UTC m=+3488.577693845" Oct 07 20:16:02 crc kubenswrapper[4813]: I1007 20:16:02.783208 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4kgll" Oct 07 20:16:02 crc kubenswrapper[4813]: I1007 20:16:02.844645 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4kgll"] Oct 07 20:16:04 crc kubenswrapper[4813]: I1007 20:16:04.723604 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4kgll" podUID="b232485b-3f02-46aa-b939-3da7ae757414" containerName="registry-server" containerID="cri-o://e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2" gracePeriod=2 Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.346361 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4kgll" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.441489 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-catalog-content\") pod \"b232485b-3f02-46aa-b939-3da7ae757414\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.441556 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrvdq\" (UniqueName: \"kubernetes.io/projected/b232485b-3f02-46aa-b939-3da7ae757414-kube-api-access-zrvdq\") pod \"b232485b-3f02-46aa-b939-3da7ae757414\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.441626 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-utilities\") pod \"b232485b-3f02-46aa-b939-3da7ae757414\" (UID: \"b232485b-3f02-46aa-b939-3da7ae757414\") " Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.447397 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-utilities" (OuterVolumeSpecName: "utilities") pod "b232485b-3f02-46aa-b939-3da7ae757414" (UID: "b232485b-3f02-46aa-b939-3da7ae757414"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.453179 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b232485b-3f02-46aa-b939-3da7ae757414-kube-api-access-zrvdq" (OuterVolumeSpecName: "kube-api-access-zrvdq") pod "b232485b-3f02-46aa-b939-3da7ae757414" (UID: "b232485b-3f02-46aa-b939-3da7ae757414"). InnerVolumeSpecName "kube-api-access-zrvdq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.467436 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b232485b-3f02-46aa-b939-3da7ae757414" (UID: "b232485b-3f02-46aa-b939-3da7ae757414"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.543556 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrvdq\" (UniqueName: \"kubernetes.io/projected/b232485b-3f02-46aa-b939-3da7ae757414-kube-api-access-zrvdq\") on node \"crc\" DevicePath \"\"" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.543596 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.543610 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b232485b-3f02-46aa-b939-3da7ae757414-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.739925 4813 generic.go:334] "Generic (PLEG): container finished" podID="b232485b-3f02-46aa-b939-3da7ae757414" containerID="e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2" exitCode=0 Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.740012 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4kgll" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.740013 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4kgll" event={"ID":"b232485b-3f02-46aa-b939-3da7ae757414","Type":"ContainerDied","Data":"e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2"} Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.740099 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4kgll" event={"ID":"b232485b-3f02-46aa-b939-3da7ae757414","Type":"ContainerDied","Data":"866dc63e63e2bce7c99605839543c7a6ddf5cdc9d34feddde8d9617a1bd69d4a"} Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.740143 4813 scope.go:117] "RemoveContainer" containerID="e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.785609 4813 scope.go:117] "RemoveContainer" containerID="da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.794099 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4kgll"] Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.815487 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4kgll"] Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.820855 4813 scope.go:117] "RemoveContainer" containerID="cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.881282 4813 scope.go:117] "RemoveContainer" containerID="e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2" Oct 07 20:16:05 crc kubenswrapper[4813]: E1007 20:16:05.881709 4813 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2\": container with ID starting with e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2 not found: ID does not exist" containerID="e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.881753 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2"} err="failed to get container status \"e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2\": rpc error: code = NotFound desc = could not find container \"e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2\": container with ID starting with e0aea099282014e456b1fbe7125bc6943d7f5b2239eba44b7bcc743527bcc5b2 not found: ID does not exist" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.881779 4813 scope.go:117] "RemoveContainer" containerID="da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5" Oct 07 20:16:05 crc kubenswrapper[4813]: E1007 20:16:05.882140 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5\": container with ID starting with da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5 not found: ID does not exist" containerID="da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.882187 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5"} err="failed to get container status \"da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5\": rpc error: code = NotFound desc = could not find container \"da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5\": container with ID starting with da493f3faa7339705154bc54288475fea7de7f8ccf1974d4adbc10e8a08a12f5 not found: ID does not exist" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.882218 4813 scope.go:117] "RemoveContainer" containerID="cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820" Oct 07 20:16:05 crc kubenswrapper[4813]: E1007 20:16:05.882600 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820\": container with ID starting with cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820 not found: ID does not exist" containerID="cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820" Oct 07 20:16:05 crc kubenswrapper[4813]: I1007 20:16:05.882632 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820"} err="failed to get container status \"cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820\": rpc error: code = NotFound desc = could not find container \"cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820\": container with ID starting with cc2381fa72e066a0778a0ea0317c60d0eed72d125ff076dd9330e492df328820 not found: ID does not exist" Oct 07 20:16:06 crc kubenswrapper[4813]: I1007 20:16:06.613908 4813 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="b232485b-3f02-46aa-b939-3da7ae757414" path="/var/lib/kubelet/pods/b232485b-3f02-46aa-b939-3da7ae757414/volumes" Oct 07 20:16:52 crc kubenswrapper[4813]: I1007 20:16:52.079141 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:16:52 crc kubenswrapper[4813]: I1007 20:16:52.079708 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:17:22 crc kubenswrapper[4813]: I1007 20:17:22.079226 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:17:22 crc kubenswrapper[4813]: I1007 20:17:22.080008 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.079737 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.080505 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.080561 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.081417 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.081512 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" gracePeriod=600 Oct 07 20:17:52 crc kubenswrapper[4813]: E1007 20:17:52.206436 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.827724 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" exitCode=0 Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.827784 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef"} Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.828081 4813 scope.go:117] "RemoveContainer" containerID="219803f8ae984cfeee72a9dfecefa72be26401a3ec358d9eeeaba0c71ada998b" Oct 07 20:17:52 crc kubenswrapper[4813]: I1007 20:17:52.832463 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:17:52 crc kubenswrapper[4813]: E1007 20:17:52.833216 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:18:07 crc kubenswrapper[4813]: I1007 20:18:07.603290 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:18:07 crc kubenswrapper[4813]: E1007 20:18:07.604412 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:18:20 crc kubenswrapper[4813]: I1007 20:18:20.603286 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:18:20 crc kubenswrapper[4813]: E1007 20:18:20.604965 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:18:31 crc kubenswrapper[4813]: I1007 20:18:31.603173 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:18:31 crc kubenswrapper[4813]: E1007 20:18:31.604161 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:18:43 crc kubenswrapper[4813]: I1007 20:18:43.602846 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:18:43 crc kubenswrapper[4813]: E1007 20:18:43.603471 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:18:56 crc kubenswrapper[4813]: I1007 20:18:56.603037 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:18:56 crc kubenswrapper[4813]: E1007 20:18:56.603914 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:19:08 crc kubenswrapper[4813]: I1007 20:19:08.602931 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:19:08 crc kubenswrapper[4813]: E1007 20:19:08.603830 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:19:20 crc kubenswrapper[4813]: I1007 20:19:20.603779 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:19:20 crc kubenswrapper[4813]: E1007 20:19:20.605193 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:19:34 crc kubenswrapper[4813]: I1007 20:19:34.622399 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:19:34 crc kubenswrapper[4813]: E1007 20:19:34.623693 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:19:49 crc kubenswrapper[4813]: I1007 20:19:49.602511 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:19:49 crc kubenswrapper[4813]: E1007 20:19:49.603172 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:20:02 crc kubenswrapper[4813]: I1007 20:20:02.602813 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:20:02 crc kubenswrapper[4813]: E1007 20:20:02.603798 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:20:13 crc kubenswrapper[4813]: I1007 20:20:13.603452 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:20:13 crc kubenswrapper[4813]: E1007 20:20:13.604722 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:20:25 crc kubenswrapper[4813]: I1007 20:20:25.603487 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:20:25 crc kubenswrapper[4813]: E1007 20:20:25.604302 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:20:38 crc kubenswrapper[4813]: I1007 20:20:38.605505 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:20:38 crc kubenswrapper[4813]: E1007 20:20:38.606385 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:20:51 crc kubenswrapper[4813]: I1007 20:20:51.602725 4813 
scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:20:51 crc kubenswrapper[4813]: E1007 20:20:51.604779 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:21:06 crc kubenswrapper[4813]: I1007 20:21:06.603807 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:21:06 crc kubenswrapper[4813]: E1007 20:21:06.605296 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:21:19 crc kubenswrapper[4813]: I1007 20:21:19.603678 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:21:19 crc kubenswrapper[4813]: E1007 20:21:19.604579 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:21:32 crc kubenswrapper[4813]: I1007 20:21:32.603455 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:21:32 crc kubenswrapper[4813]: E1007 20:21:32.604430 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:21:43 crc kubenswrapper[4813]: I1007 20:21:43.603197 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:21:43 crc kubenswrapper[4813]: E1007 20:21:43.604450 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:21:58 crc kubenswrapper[4813]: I1007 20:21:58.603195 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:21:58 crc kubenswrapper[4813]: E1007 20:21:58.604071 4813 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:22:09 crc kubenswrapper[4813]: I1007 20:22:09.602973 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:22:09 crc kubenswrapper[4813]: E1007 20:22:09.603908 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.360988 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nd6nn"] Oct 07 20:22:10 crc kubenswrapper[4813]: E1007 20:22:10.361753 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b232485b-3f02-46aa-b939-3da7ae757414" containerName="extract-utilities" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.361850 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b232485b-3f02-46aa-b939-3da7ae757414" containerName="extract-utilities" Oct 07 20:22:10 crc kubenswrapper[4813]: E1007 20:22:10.361958 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b232485b-3f02-46aa-b939-3da7ae757414" containerName="registry-server" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.362034 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b232485b-3f02-46aa-b939-3da7ae757414" containerName="registry-server" Oct 07 20:22:10 crc kubenswrapper[4813]: E1007 20:22:10.362125 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b232485b-3f02-46aa-b939-3da7ae757414" containerName="extract-content" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.362195 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="b232485b-3f02-46aa-b939-3da7ae757414" containerName="extract-content" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.362508 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="b232485b-3f02-46aa-b939-3da7ae757414" containerName="registry-server" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.364221 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.380786 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nd6nn"] Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.452117 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-utilities\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.452166 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-catalog-content\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.452280 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp97k\" (UniqueName: \"kubernetes.io/projected/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-kube-api-access-fp97k\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.554427 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-utilities\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.554683 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-catalog-content\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.554861 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp97k\" (UniqueName: \"kubernetes.io/projected/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-kube-api-access-fp97k\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.555015 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-utilities\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.555098 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-catalog-content\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.575092 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fp97k\" (UniqueName: \"kubernetes.io/projected/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-kube-api-access-fp97k\") pod \"redhat-operators-nd6nn\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:10 crc kubenswrapper[4813]: I1007 20:22:10.728270 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:11 crc kubenswrapper[4813]: I1007 20:22:11.296635 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nd6nn"] Oct 07 20:22:11 crc kubenswrapper[4813]: I1007 20:22:11.455802 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nd6nn" event={"ID":"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a","Type":"ContainerStarted","Data":"105ac207ae6410ee199f98639d42eeb31e663be86f43bad036e3da098dee1523"} Oct 07 20:22:12 crc kubenswrapper[4813]: I1007 20:22:12.464725 4813 generic.go:334] "Generic (PLEG): container finished" podID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerID="a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d" exitCode=0 Oct 07 20:22:12 crc kubenswrapper[4813]: I1007 20:22:12.464770 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nd6nn" event={"ID":"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a","Type":"ContainerDied","Data":"a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d"} Oct 07 20:22:12 crc kubenswrapper[4813]: I1007 20:22:12.467216 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 20:22:14 crc kubenswrapper[4813]: I1007 20:22:14.484580 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nd6nn" event={"ID":"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a","Type":"ContainerStarted","Data":"3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7"} Oct 07 20:22:17 crc kubenswrapper[4813]: I1007 20:22:17.519916 4813 generic.go:334] "Generic (PLEG): container finished" podID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerID="3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7" exitCode=0 Oct 07 20:22:17 crc kubenswrapper[4813]: I1007 20:22:17.520680 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nd6nn" event={"ID":"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a","Type":"ContainerDied","Data":"3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7"} Oct 07 20:22:18 crc kubenswrapper[4813]: I1007 20:22:18.538219 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nd6nn" event={"ID":"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a","Type":"ContainerStarted","Data":"fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0"} Oct 07 20:22:18 crc kubenswrapper[4813]: I1007 20:22:18.556596 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nd6nn" podStartSLOduration=2.778342213 podStartE2EDuration="8.556569677s" podCreationTimestamp="2025-10-07 20:22:10 +0000 UTC" firstStartedPulling="2025-10-07 20:22:12.467027656 +0000 UTC m=+3858.545283257" lastFinishedPulling="2025-10-07 20:22:18.24525511 +0000 UTC m=+3864.323510721" observedRunningTime="2025-10-07 20:22:18.555115206 +0000 UTC m=+3864.633370857" watchObservedRunningTime="2025-10-07 20:22:18.556569677 +0000 UTC m=+3864.634825328" Oct 07 20:22:20 crc 
kubenswrapper[4813]: I1007 20:22:20.728611 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:20 crc kubenswrapper[4813]: I1007 20:22:20.730131 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:21 crc kubenswrapper[4813]: I1007 20:22:21.784577 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nd6nn" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="registry-server" probeResult="failure" output=< Oct 07 20:22:21 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 20:22:21 crc kubenswrapper[4813]: > Oct 07 20:22:24 crc kubenswrapper[4813]: I1007 20:22:24.612826 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:22:24 crc kubenswrapper[4813]: E1007 20:22:24.613569 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:22:30 crc kubenswrapper[4813]: I1007 20:22:30.789151 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:30 crc kubenswrapper[4813]: I1007 20:22:30.853212 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:31 crc kubenswrapper[4813]: I1007 20:22:31.076882 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nd6nn"] Oct 07 20:22:32 crc kubenswrapper[4813]: I1007 20:22:32.676379 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nd6nn" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="registry-server" containerID="cri-o://fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0" gracePeriod=2 Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.225586 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.318441 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fp97k\" (UniqueName: \"kubernetes.io/projected/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-kube-api-access-fp97k\") pod \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.318693 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-catalog-content\") pod \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.318757 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-utilities\") pod \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\" (UID: \"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a\") " Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.319873 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-utilities" (OuterVolumeSpecName: "utilities") pod "1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" (UID: "1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.320384 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.336258 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-kube-api-access-fp97k" (OuterVolumeSpecName: "kube-api-access-fp97k") pod "1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" (UID: "1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a"). InnerVolumeSpecName "kube-api-access-fp97k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.423442 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fp97k\" (UniqueName: \"kubernetes.io/projected/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-kube-api-access-fp97k\") on node \"crc\" DevicePath \"\"" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.449039 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" (UID: "1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.525545 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.689676 4813 generic.go:334] "Generic (PLEG): container finished" podID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerID="fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0" exitCode=0 Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.689725 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nd6nn" event={"ID":"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a","Type":"ContainerDied","Data":"fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0"} Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.689756 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nd6nn" event={"ID":"1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a","Type":"ContainerDied","Data":"105ac207ae6410ee199f98639d42eeb31e663be86f43bad036e3da098dee1523"} Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.689781 4813 scope.go:117] "RemoveContainer" containerID="fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.689786 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nd6nn" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.717838 4813 scope.go:117] "RemoveContainer" containerID="3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.739726 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nd6nn"] Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.748746 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nd6nn"] Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.758051 4813 scope.go:117] "RemoveContainer" containerID="a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.801071 4813 scope.go:117] "RemoveContainer" containerID="fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0" Oct 07 20:22:33 crc kubenswrapper[4813]: E1007 20:22:33.801666 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0\": container with ID starting with fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0 not found: ID does not exist" containerID="fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0" Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.801708 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0"} err="failed to get container status \"fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0\": rpc error: code = NotFound desc = could not find container \"fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0\": container with ID starting with fc409beb41ff0298b56ff81b183502cb10218d84fd6dd3e2c0000062598eeea0 not found: ID does not exist" Oct 07 20:22:33 crc 
Oct 07 20:22:33 crc kubenswrapper[4813]: E1007 20:22:33.802157 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7\": container with ID starting with 3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7 not found: ID does not exist" containerID="3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7"
Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.802194 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7"} err="failed to get container status \"3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7\": rpc error: code = NotFound desc = could not find container \"3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7\": container with ID starting with 3016f1c80a50747c6314d2da7e65ac4bd6e10a19036b0c39ce7fc63ee43512d7 not found: ID does not exist"
Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.802215 4813 scope.go:117] "RemoveContainer" containerID="a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d"
Oct 07 20:22:33 crc kubenswrapper[4813]: E1007 20:22:33.802606 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d\": container with ID starting with a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d not found: ID does not exist" containerID="a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d"
Oct 07 20:22:33 crc kubenswrapper[4813]: I1007 20:22:33.802640 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d"} err="failed to get container status \"a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d\": rpc error: code = NotFound desc = could not find container \"a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d\": container with ID starting with a219e190bd83b4f84946d889a82d8a937f9d8e1e47dea4690178e0234f704f7d not found: ID does not exist"
Oct 07 20:22:34 crc kubenswrapper[4813]: I1007 20:22:34.623864 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" path="/var/lib/kubelet/pods/1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a/volumes"
Oct 07 20:22:38 crc kubenswrapper[4813]: I1007 20:22:38.606064 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef"
Oct 07 20:22:38 crc kubenswrapper[4813]: E1007 20:22:38.607355 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.066375 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hmdfh"]
Oct 07 20:22:44 crc kubenswrapper[4813]: E1007 20:22:44.067152 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="registry-server"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.067164 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="registry-server"
Oct 07 20:22:44 crc kubenswrapper[4813]: E1007 20:22:44.067183 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="extract-utilities"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.067189 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="extract-utilities"
Oct 07 20:22:44 crc kubenswrapper[4813]: E1007 20:22:44.067203 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="extract-content"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.067209 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="extract-content"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.067393 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c25e6f3-0cb5-48ed-bf22-f8f71f9ee36a" containerName="registry-server"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.068613 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.079418 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hmdfh"]
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.159039 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spd76\" (UniqueName: \"kubernetes.io/projected/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-kube-api-access-spd76\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.159085 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-catalog-content\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.159118 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-utilities\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.260991 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spd76\" (UniqueName: \"kubernetes.io/projected/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-kube-api-access-spd76\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.261272 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-catalog-content\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh"
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-catalog-content\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh" Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.261449 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-utilities\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh" Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.261744 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-catalog-content\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh" Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.261844 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-utilities\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh" Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.291868 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spd76\" (UniqueName: \"kubernetes.io/projected/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-kube-api-access-spd76\") pod \"certified-operators-hmdfh\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") " pod="openshift-marketplace/certified-operators-hmdfh" Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.415161 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hmdfh" Oct 07 20:22:44 crc kubenswrapper[4813]: I1007 20:22:44.920195 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hmdfh"] Oct 07 20:22:45 crc kubenswrapper[4813]: I1007 20:22:45.825704 4813 generic.go:334] "Generic (PLEG): container finished" podID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerID="e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f" exitCode=0 Oct 07 20:22:45 crc kubenswrapper[4813]: I1007 20:22:45.825778 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmdfh" event={"ID":"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa","Type":"ContainerDied","Data":"e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f"} Oct 07 20:22:45 crc kubenswrapper[4813]: I1007 20:22:45.826145 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmdfh" event={"ID":"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa","Type":"ContainerStarted","Data":"db23aa54e628a7904581266d3a18e431be7eb2f0ea75860c74fe8efe57995988"} Oct 07 20:22:47 crc kubenswrapper[4813]: I1007 20:22:47.850512 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmdfh" event={"ID":"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa","Type":"ContainerStarted","Data":"3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef"} Oct 07 20:22:48 crc kubenswrapper[4813]: I1007 20:22:48.864255 4813 generic.go:334] "Generic (PLEG): container finished" podID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerID="3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef" exitCode=0 Oct 07 20:22:48 crc kubenswrapper[4813]: I1007 20:22:48.864357 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmdfh" event={"ID":"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa","Type":"ContainerDied","Data":"3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef"} Oct 07 20:22:49 crc kubenswrapper[4813]: I1007 20:22:49.878965 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmdfh" event={"ID":"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa","Type":"ContainerStarted","Data":"8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381"} Oct 07 20:22:49 crc kubenswrapper[4813]: I1007 20:22:49.914096 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hmdfh" podStartSLOduration=2.482216642 podStartE2EDuration="5.914073582s" podCreationTimestamp="2025-10-07 20:22:44 +0000 UTC" firstStartedPulling="2025-10-07 20:22:45.828593839 +0000 UTC m=+3891.906849480" lastFinishedPulling="2025-10-07 20:22:49.260450799 +0000 UTC m=+3895.338706420" observedRunningTime="2025-10-07 20:22:49.91051391 +0000 UTC m=+3895.988769531" watchObservedRunningTime="2025-10-07 20:22:49.914073582 +0000 UTC m=+3895.992329203" Oct 07 20:22:51 crc kubenswrapper[4813]: I1007 20:22:51.603215 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:22:51 crc kubenswrapper[4813]: E1007 20:22:51.604430 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Oct 07 20:22:54 crc kubenswrapper[4813]: I1007 20:22:54.416073 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:54 crc kubenswrapper[4813]: I1007 20:22:54.416586 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:54 crc kubenswrapper[4813]: I1007 20:22:54.489010 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:55 crc kubenswrapper[4813]: I1007 20:22:55.008005 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:55 crc kubenswrapper[4813]: I1007 20:22:55.068298 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hmdfh"]
Oct 07 20:22:56 crc kubenswrapper[4813]: I1007 20:22:56.965493 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hmdfh" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerName="registry-server" containerID="cri-o://8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381" gracePeriod=2
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.709802 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hmdfh"
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.858065 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-utilities\") pod \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") "
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.858656 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spd76\" (UniqueName: \"kubernetes.io/projected/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-kube-api-access-spd76\") pod \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") "
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.859035 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-catalog-content\") pod \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\" (UID: \"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa\") "
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.866671 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-kube-api-access-spd76" (OuterVolumeSpecName: "kube-api-access-spd76") pod "f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" (UID: "f39fbb53-0940-46a0-b7ce-5e1fe6b368fa"). InnerVolumeSpecName "kube-api-access-spd76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.869457 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-utilities" (OuterVolumeSpecName: "utilities") pod "f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" (UID: "f39fbb53-0940-46a0-b7ce-5e1fe6b368fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.944222 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" (UID: "f39fbb53-0940-46a0-b7ce-5e1fe6b368fa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.962111 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spd76\" (UniqueName: \"kubernetes.io/projected/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-kube-api-access-spd76\") on node \"crc\" DevicePath \"\""
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.962578 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.962694 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.977252 4813 generic.go:334] "Generic (PLEG): container finished" podID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerID="8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381" exitCode=0
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.977310 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmdfh" event={"ID":"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa","Type":"ContainerDied","Data":"8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381"}
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.977373 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hmdfh" event={"ID":"f39fbb53-0940-46a0-b7ce-5e1fe6b368fa","Type":"ContainerDied","Data":"db23aa54e628a7904581266d3a18e431be7eb2f0ea75860c74fe8efe57995988"}
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.977397 4813 scope.go:117] "RemoveContainer" containerID="8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381"
Oct 07 20:22:57 crc kubenswrapper[4813]: I1007 20:22:57.977553 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hmdfh"
Need to start a new one" pod="openshift-marketplace/certified-operators-hmdfh" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.021685 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hmdfh"] Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.026458 4813 scope.go:117] "RemoveContainer" containerID="3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.031459 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hmdfh"] Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.051514 4813 scope.go:117] "RemoveContainer" containerID="e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.106667 4813 scope.go:117] "RemoveContainer" containerID="8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381" Oct 07 20:22:58 crc kubenswrapper[4813]: E1007 20:22:58.107424 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381\": container with ID starting with 8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381 not found: ID does not exist" containerID="8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.107462 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381"} err="failed to get container status \"8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381\": rpc error: code = NotFound desc = could not find container \"8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381\": container with ID starting with 8853216471d6bb1b40b086913778d6a4a5e047fd2b3b39622235bd77ba7f2381 not found: ID does not exist" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.107488 4813 scope.go:117] "RemoveContainer" containerID="3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef" Oct 07 20:22:58 crc kubenswrapper[4813]: E1007 20:22:58.107776 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef\": container with ID starting with 3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef not found: ID does not exist" containerID="3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.107806 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef"} err="failed to get container status \"3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef\": rpc error: code = NotFound desc = could not find container \"3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef\": container with ID starting with 3b62b4c17192ee79d44e25bd6ae1f16875b7db1f16883cf1fb2f2576155235ef not found: ID does not exist" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.107826 4813 scope.go:117] "RemoveContainer" containerID="e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f" Oct 07 20:22:58 crc kubenswrapper[4813]: E1007 20:22:58.108167 4813 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f\": container with ID starting with e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f not found: ID does not exist" containerID="e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.108216 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f"} err="failed to get container status \"e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f\": rpc error: code = NotFound desc = could not find container \"e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f\": container with ID starting with e4f44bb99910f85ca92d00c67a618698dc0ab049c5bbef6500982aafc3d5031f not found: ID does not exist" Oct 07 20:22:58 crc kubenswrapper[4813]: I1007 20:22:58.620540 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" path="/var/lib/kubelet/pods/f39fbb53-0940-46a0-b7ce-5e1fe6b368fa/volumes" Oct 07 20:23:05 crc kubenswrapper[4813]: I1007 20:23:05.602238 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.091792 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"dded469f21a70f38b4d485cd654250ddc8aee28d17a05bf6d7b63ac0babfc8d3"} Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.696839 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dpzk8"] Oct 07 20:23:06 crc kubenswrapper[4813]: E1007 20:23:06.697488 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerName="extract-content" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.697500 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerName="extract-content" Oct 07 20:23:06 crc kubenswrapper[4813]: E1007 20:23:06.697540 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerName="extract-utilities" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.697546 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerName="extract-utilities" Oct 07 20:23:06 crc kubenswrapper[4813]: E1007 20:23:06.697557 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerName="registry-server" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.697565 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerName="registry-server" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.697732 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39fbb53-0940-46a0-b7ce-5e1fe6b368fa" containerName="registry-server" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.699002 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.717436 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dpzk8"] Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.845199 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-utilities\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.845525 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-catalog-content\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.845652 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlqtq\" (UniqueName: \"kubernetes.io/projected/9d1d0f2a-acfb-4044-8427-d16e19e5c942-kube-api-access-vlqtq\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.947765 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-utilities\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.947808 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-catalog-content\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.947856 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlqtq\" (UniqueName: \"kubernetes.io/projected/9d1d0f2a-acfb-4044-8427-d16e19e5c942-kube-api-access-vlqtq\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.948916 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-utilities\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.950726 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-catalog-content\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:06 crc kubenswrapper[4813]: I1007 20:23:06.974125 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vlqtq\" (UniqueName: \"kubernetes.io/projected/9d1d0f2a-acfb-4044-8427-d16e19e5c942-kube-api-access-vlqtq\") pod \"community-operators-dpzk8\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") " pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:07 crc kubenswrapper[4813]: I1007 20:23:07.040861 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:07 crc kubenswrapper[4813]: I1007 20:23:07.562342 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dpzk8"] Oct 07 20:23:07 crc kubenswrapper[4813]: W1007 20:23:07.570912 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d1d0f2a_acfb_4044_8427_d16e19e5c942.slice/crio-a20ad7c0161347a2037d9f3e307dd18364de9a08e00b8a709cf826ca5571eaee WatchSource:0}: Error finding container a20ad7c0161347a2037d9f3e307dd18364de9a08e00b8a709cf826ca5571eaee: Status 404 returned error can't find the container with id a20ad7c0161347a2037d9f3e307dd18364de9a08e00b8a709cf826ca5571eaee Oct 07 20:23:08 crc kubenswrapper[4813]: I1007 20:23:08.131280 4813 generic.go:334] "Generic (PLEG): container finished" podID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerID="9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263" exitCode=0 Oct 07 20:23:08 crc kubenswrapper[4813]: I1007 20:23:08.131363 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpzk8" event={"ID":"9d1d0f2a-acfb-4044-8427-d16e19e5c942","Type":"ContainerDied","Data":"9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263"} Oct 07 20:23:08 crc kubenswrapper[4813]: I1007 20:23:08.131853 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpzk8" event={"ID":"9d1d0f2a-acfb-4044-8427-d16e19e5c942","Type":"ContainerStarted","Data":"a20ad7c0161347a2037d9f3e307dd18364de9a08e00b8a709cf826ca5571eaee"} Oct 07 20:23:09 crc kubenswrapper[4813]: I1007 20:23:09.142901 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpzk8" event={"ID":"9d1d0f2a-acfb-4044-8427-d16e19e5c942","Type":"ContainerStarted","Data":"fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14"} Oct 07 20:23:10 crc kubenswrapper[4813]: I1007 20:23:10.171703 4813 generic.go:334] "Generic (PLEG): container finished" podID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerID="fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14" exitCode=0 Oct 07 20:23:10 crc kubenswrapper[4813]: I1007 20:23:10.172047 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpzk8" event={"ID":"9d1d0f2a-acfb-4044-8427-d16e19e5c942","Type":"ContainerDied","Data":"fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14"} Oct 07 20:23:11 crc kubenswrapper[4813]: I1007 20:23:11.186162 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpzk8" event={"ID":"9d1d0f2a-acfb-4044-8427-d16e19e5c942","Type":"ContainerStarted","Data":"38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02"} Oct 07 20:23:11 crc kubenswrapper[4813]: I1007 20:23:11.209027 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dpzk8" 
Oct 07 20:23:17 crc kubenswrapper[4813]: I1007 20:23:17.042516 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dpzk8"
Oct 07 20:23:17 crc kubenswrapper[4813]: I1007 20:23:17.044286 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dpzk8"
Oct 07 20:23:17 crc kubenswrapper[4813]: I1007 20:23:17.090100 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dpzk8"
Oct 07 20:23:17 crc kubenswrapper[4813]: I1007 20:23:17.291016 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dpzk8"
Oct 07 20:23:17 crc kubenswrapper[4813]: I1007 20:23:17.341082 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dpzk8"]
Oct 07 20:23:19 crc kubenswrapper[4813]: I1007 20:23:19.264490 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dpzk8" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerName="registry-server" containerID="cri-o://38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02" gracePeriod=2
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.022076 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpzk8"
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.103507 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-utilities\") pod \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") "
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.103588 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlqtq\" (UniqueName: \"kubernetes.io/projected/9d1d0f2a-acfb-4044-8427-d16e19e5c942-kube-api-access-vlqtq\") pod \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") "
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.103642 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-catalog-content\") pod \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\" (UID: \"9d1d0f2a-acfb-4044-8427-d16e19e5c942\") "
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.106049 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-utilities" (OuterVolumeSpecName: "utilities") pod "9d1d0f2a-acfb-4044-8427-d16e19e5c942" (UID: "9d1d0f2a-acfb-4044-8427-d16e19e5c942"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.112501 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d1d0f2a-acfb-4044-8427-d16e19e5c942-kube-api-access-vlqtq" (OuterVolumeSpecName: "kube-api-access-vlqtq") pod "9d1d0f2a-acfb-4044-8427-d16e19e5c942" (UID: "9d1d0f2a-acfb-4044-8427-d16e19e5c942"). InnerVolumeSpecName "kube-api-access-vlqtq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.186748 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9d1d0f2a-acfb-4044-8427-d16e19e5c942" (UID: "9d1d0f2a-acfb-4044-8427-d16e19e5c942"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.205431 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.205460 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlqtq\" (UniqueName: \"kubernetes.io/projected/9d1d0f2a-acfb-4044-8427-d16e19e5c942-kube-api-access-vlqtq\") on node \"crc\" DevicePath \"\""
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.205470 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d1d0f2a-acfb-4044-8427-d16e19e5c942-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.276313 4813 generic.go:334] "Generic (PLEG): container finished" podID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerID="38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02" exitCode=0
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.277249 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpzk8" event={"ID":"9d1d0f2a-acfb-4044-8427-d16e19e5c942","Type":"ContainerDied","Data":"38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02"}
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.277395 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dpzk8" event={"ID":"9d1d0f2a-acfb-4044-8427-d16e19e5c942","Type":"ContainerDied","Data":"a20ad7c0161347a2037d9f3e307dd18364de9a08e00b8a709cf826ca5571eaee"}
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.277424 4813 scope.go:117] "RemoveContainer" containerID="38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02"
Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.277483 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dpzk8"
Need to start a new one" pod="openshift-marketplace/community-operators-dpzk8" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.312337 4813 scope.go:117] "RemoveContainer" containerID="fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.313672 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dpzk8"] Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.337355 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dpzk8"] Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.338697 4813 scope.go:117] "RemoveContainer" containerID="9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.383992 4813 scope.go:117] "RemoveContainer" containerID="38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02" Oct 07 20:23:20 crc kubenswrapper[4813]: E1007 20:23:20.385996 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02\": container with ID starting with 38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02 not found: ID does not exist" containerID="38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.386053 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02"} err="failed to get container status \"38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02\": rpc error: code = NotFound desc = could not find container \"38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02\": container with ID starting with 38a7a9aba54bbbe0e2699d2977e50e063aa410f48a6b4ffd313a904334cb9f02 not found: ID does not exist" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.386091 4813 scope.go:117] "RemoveContainer" containerID="fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14" Oct 07 20:23:20 crc kubenswrapper[4813]: E1007 20:23:20.386572 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14\": container with ID starting with fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14 not found: ID does not exist" containerID="fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.386621 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14"} err="failed to get container status \"fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14\": rpc error: code = NotFound desc = could not find container \"fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14\": container with ID starting with fa6c73af3f7b8cdad553f872bee0d32a750334a07a1e81dd7ec7df44e7acab14 not found: ID does not exist" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.386655 4813 scope.go:117] "RemoveContainer" containerID="9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263" Oct 07 20:23:20 crc kubenswrapper[4813]: E1007 20:23:20.386952 4813 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263\": container with ID starting with 9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263 not found: ID does not exist" containerID="9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.386992 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263"} err="failed to get container status \"9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263\": rpc error: code = NotFound desc = could not find container \"9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263\": container with ID starting with 9252b70d01456bb8870a9b5f035919f05d9ffc2c2200f8f40783bfa4d792d263 not found: ID does not exist" Oct 07 20:23:20 crc kubenswrapper[4813]: I1007 20:23:20.616357 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" path="/var/lib/kubelet/pods/9d1d0f2a-acfb-4044-8427-d16e19e5c942/volumes" Oct 07 20:25:22 crc kubenswrapper[4813]: I1007 20:25:22.078909 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:25:22 crc kubenswrapper[4813]: I1007 20:25:22.079581 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:25:52 crc kubenswrapper[4813]: I1007 20:25:52.079089 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:25:52 crc kubenswrapper[4813]: I1007 20:25:52.079681 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.664511 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jgzd8"] Oct 07 20:26:01 crc kubenswrapper[4813]: E1007 20:26:01.665303 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerName="extract-content" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.665314 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerName="extract-content" Oct 07 20:26:01 crc kubenswrapper[4813]: E1007 20:26:01.665329 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerName="registry-server" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.665335 4813 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerName="registry-server" Oct 07 20:26:01 crc kubenswrapper[4813]: E1007 20:26:01.665366 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerName="extract-utilities" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.665372 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerName="extract-utilities" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.665547 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d1d0f2a-acfb-4044-8427-d16e19e5c942" containerName="registry-server" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.667932 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.683297 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgzd8"] Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.792485 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-utilities\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.792571 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgwqq\" (UniqueName: \"kubernetes.io/projected/03f77f46-6b51-42fc-866f-83404ce31848-kube-api-access-jgwqq\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.792611 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-catalog-content\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.895110 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-utilities\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.895213 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgwqq\" (UniqueName: \"kubernetes.io/projected/03f77f46-6b51-42fc-866f-83404ce31848-kube-api-access-jgwqq\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.895267 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-catalog-content\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.895567 
4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-utilities\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.895923 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-catalog-content\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.918265 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgwqq\" (UniqueName: \"kubernetes.io/projected/03f77f46-6b51-42fc-866f-83404ce31848-kube-api-access-jgwqq\") pod \"redhat-marketplace-jgzd8\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:01 crc kubenswrapper[4813]: I1007 20:26:01.993598 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:02 crc kubenswrapper[4813]: I1007 20:26:02.440544 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgzd8"] Oct 07 20:26:02 crc kubenswrapper[4813]: I1007 20:26:02.988228 4813 generic.go:334] "Generic (PLEG): container finished" podID="03f77f46-6b51-42fc-866f-83404ce31848" containerID="0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097" exitCode=0 Oct 07 20:26:02 crc kubenswrapper[4813]: I1007 20:26:02.988314 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgzd8" event={"ID":"03f77f46-6b51-42fc-866f-83404ce31848","Type":"ContainerDied","Data":"0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097"} Oct 07 20:26:02 crc kubenswrapper[4813]: I1007 20:26:02.988423 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgzd8" event={"ID":"03f77f46-6b51-42fc-866f-83404ce31848","Type":"ContainerStarted","Data":"bed0f69ea8b41a39b13a97da204f1f98f34e2784e4909068f34b2b51dfbe66b2"} Oct 07 20:26:04 crc kubenswrapper[4813]: I1007 20:26:04.004677 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgzd8" event={"ID":"03f77f46-6b51-42fc-866f-83404ce31848","Type":"ContainerStarted","Data":"e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f"} Oct 07 20:26:05 crc kubenswrapper[4813]: I1007 20:26:05.024080 4813 generic.go:334] "Generic (PLEG): container finished" podID="03f77f46-6b51-42fc-866f-83404ce31848" containerID="e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f" exitCode=0 Oct 07 20:26:05 crc kubenswrapper[4813]: I1007 20:26:05.024154 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgzd8" event={"ID":"03f77f46-6b51-42fc-866f-83404ce31848","Type":"ContainerDied","Data":"e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f"} Oct 07 20:26:06 crc kubenswrapper[4813]: I1007 20:26:06.034971 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgzd8" 
event={"ID":"03f77f46-6b51-42fc-866f-83404ce31848","Type":"ContainerStarted","Data":"429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c"} Oct 07 20:26:06 crc kubenswrapper[4813]: I1007 20:26:06.058629 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jgzd8" podStartSLOduration=2.602980354 podStartE2EDuration="5.058609966s" podCreationTimestamp="2025-10-07 20:26:01 +0000 UTC" firstStartedPulling="2025-10-07 20:26:02.991184565 +0000 UTC m=+4089.069440186" lastFinishedPulling="2025-10-07 20:26:05.446814187 +0000 UTC m=+4091.525069798" observedRunningTime="2025-10-07 20:26:06.057661929 +0000 UTC m=+4092.135917540" watchObservedRunningTime="2025-10-07 20:26:06.058609966 +0000 UTC m=+4092.136865597" Oct 07 20:26:11 crc kubenswrapper[4813]: I1007 20:26:11.994667 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:11 crc kubenswrapper[4813]: I1007 20:26:11.997472 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:12 crc kubenswrapper[4813]: I1007 20:26:12.163905 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:13 crc kubenswrapper[4813]: I1007 20:26:13.178079 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:13 crc kubenswrapper[4813]: I1007 20:26:13.230179 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgzd8"] Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.127731 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jgzd8" podUID="03f77f46-6b51-42fc-866f-83404ce31848" containerName="registry-server" containerID="cri-o://429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c" gracePeriod=2 Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.674130 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.805987 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgwqq\" (UniqueName: \"kubernetes.io/projected/03f77f46-6b51-42fc-866f-83404ce31848-kube-api-access-jgwqq\") pod \"03f77f46-6b51-42fc-866f-83404ce31848\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.806107 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-utilities\") pod \"03f77f46-6b51-42fc-866f-83404ce31848\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.806172 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-catalog-content\") pod \"03f77f46-6b51-42fc-866f-83404ce31848\" (UID: \"03f77f46-6b51-42fc-866f-83404ce31848\") " Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.807604 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-utilities" (OuterVolumeSpecName: "utilities") pod "03f77f46-6b51-42fc-866f-83404ce31848" (UID: "03f77f46-6b51-42fc-866f-83404ce31848"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.828866 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03f77f46-6b51-42fc-866f-83404ce31848-kube-api-access-jgwqq" (OuterVolumeSpecName: "kube-api-access-jgwqq") pod "03f77f46-6b51-42fc-866f-83404ce31848" (UID: "03f77f46-6b51-42fc-866f-83404ce31848"). InnerVolumeSpecName "kube-api-access-jgwqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.829424 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03f77f46-6b51-42fc-866f-83404ce31848" (UID: "03f77f46-6b51-42fc-866f-83404ce31848"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.908525 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.908559 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03f77f46-6b51-42fc-866f-83404ce31848-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:26:15 crc kubenswrapper[4813]: I1007 20:26:15.908570 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgwqq\" (UniqueName: \"kubernetes.io/projected/03f77f46-6b51-42fc-866f-83404ce31848-kube-api-access-jgwqq\") on node \"crc\" DevicePath \"\"" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.139693 4813 generic.go:334] "Generic (PLEG): container finished" podID="03f77f46-6b51-42fc-866f-83404ce31848" containerID="429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c" exitCode=0 Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.139743 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgzd8" event={"ID":"03f77f46-6b51-42fc-866f-83404ce31848","Type":"ContainerDied","Data":"429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c"} Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.139803 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jgzd8" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.140743 4813 scope.go:117] "RemoveContainer" containerID="429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.140657 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgzd8" event={"ID":"03f77f46-6b51-42fc-866f-83404ce31848","Type":"ContainerDied","Data":"bed0f69ea8b41a39b13a97da204f1f98f34e2784e4909068f34b2b51dfbe66b2"} Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.176213 4813 scope.go:117] "RemoveContainer" containerID="e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.198428 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgzd8"] Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.215022 4813 scope.go:117] "RemoveContainer" containerID="0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.216517 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgzd8"] Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.254245 4813 scope.go:117] "RemoveContainer" containerID="429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c" Oct 07 20:26:16 crc kubenswrapper[4813]: E1007 20:26:16.254999 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c\": container with ID starting with 429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c not found: ID does not exist" containerID="429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.255109 4813 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c"} err="failed to get container status \"429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c\": rpc error: code = NotFound desc = could not find container \"429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c\": container with ID starting with 429c3bdd54c5f2ef0725eb318b0b0e6d424f4c0dff70348b3cb5103b7520d69c not found: ID does not exist" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.255182 4813 scope.go:117] "RemoveContainer" containerID="e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f" Oct 07 20:26:16 crc kubenswrapper[4813]: E1007 20:26:16.255551 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f\": container with ID starting with e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f not found: ID does not exist" containerID="e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.255653 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f"} err="failed to get container status \"e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f\": rpc error: code = NotFound desc = could not find container \"e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f\": container with ID starting with e85cda84afd2a3a1e6c13b3e3ea310d78b9d4ce05302c80475536a0509f9696f not found: ID does not exist" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.255725 4813 scope.go:117] "RemoveContainer" containerID="0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097" Oct 07 20:26:16 crc kubenswrapper[4813]: E1007 20:26:16.255997 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097\": container with ID starting with 0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097 not found: ID does not exist" containerID="0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.256071 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097"} err="failed to get container status \"0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097\": rpc error: code = NotFound desc = could not find container \"0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097\": container with ID starting with 0b127dddc366ae4d778a417b609051d8e7614ad2bcf257cd56f8a1e80ed4f097 not found: ID does not exist" Oct 07 20:26:16 crc kubenswrapper[4813]: I1007 20:26:16.613310 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03f77f46-6b51-42fc-866f-83404ce31848" path="/var/lib/kubelet/pods/03f77f46-6b51-42fc-866f-83404ce31848/volumes" Oct 07 20:26:22 crc kubenswrapper[4813]: I1007 20:26:22.079337 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:26:22 crc kubenswrapper[4813]: I1007 20:26:22.079847 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:26:22 crc kubenswrapper[4813]: I1007 20:26:22.079898 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 20:26:22 crc kubenswrapper[4813]: I1007 20:26:22.080493 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dded469f21a70f38b4d485cd654250ddc8aee28d17a05bf6d7b63ac0babfc8d3"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 20:26:22 crc kubenswrapper[4813]: I1007 20:26:22.080550 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://dded469f21a70f38b4d485cd654250ddc8aee28d17a05bf6d7b63ac0babfc8d3" gracePeriod=600 Oct 07 20:26:23 crc kubenswrapper[4813]: I1007 20:26:23.223828 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="dded469f21a70f38b4d485cd654250ddc8aee28d17a05bf6d7b63ac0babfc8d3" exitCode=0 Oct 07 20:26:23 crc kubenswrapper[4813]: I1007 20:26:23.223907 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"dded469f21a70f38b4d485cd654250ddc8aee28d17a05bf6d7b63ac0babfc8d3"} Oct 07 20:26:23 crc kubenswrapper[4813]: I1007 20:26:23.224501 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"} Oct 07 20:26:23 crc kubenswrapper[4813]: I1007 20:26:23.224535 4813 scope.go:117] "RemoveContainer" containerID="9365e106a66b00ad9f2e1b4802511d1855342d6f8dad8b122e731c2f9a2f91ef" Oct 07 20:27:04 crc kubenswrapper[4813]: I1007 20:27:04.666675 4813 generic.go:334] "Generic (PLEG): container finished" podID="8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" containerID="bc0ad3b3ef13b1b9a0c280b9a982402955af3d850ed84a536ff1d087840fec09" exitCode=0 Oct 07 20:27:04 crc kubenswrapper[4813]: I1007 20:27:04.666778 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136","Type":"ContainerDied","Data":"bc0ad3b3ef13b1b9a0c280b9a982402955af3d850ed84a536ff1d087840fec09"} Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.053411 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150288 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150359 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-workdir\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150449 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config-secret\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150494 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwbm\" (UniqueName: \"kubernetes.io/projected/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-kube-api-access-nzwbm\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150640 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ssh-key\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150685 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-config-data\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150712 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ca-certs\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150807 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.150836 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-temporary\") pod \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\" (UID: \"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136\") " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.151861 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.151869 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-config-data" (OuterVolumeSpecName: "config-data") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.154235 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.174844 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "test-operator-logs") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.179166 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-kube-api-access-nzwbm" (OuterVolumeSpecName: "kube-api-access-nzwbm") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "kube-api-access-nzwbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.179776 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.190267 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.221038 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.222381 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" (UID: "8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253027 4813 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253062 4813 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-config-data\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253073 4813 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253081 4813 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253094 4813 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253777 4813 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253795 4813 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253805 4813 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.253815 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwbm\" (UniqueName: \"kubernetes.io/projected/8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136-kube-api-access-nzwbm\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.275663 4813 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.355649 4813 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.691657 4813 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136","Type":"ContainerDied","Data":"230cff4ef4fea6c84def1fdee0bb4eae74f6d6e1a3758a468a5113f904ce0c2b"} Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.691879 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="230cff4ef4fea6c84def1fdee0bb4eae74f6d6e1a3758a468a5113f904ce0c2b" Oct 07 20:27:06 crc kubenswrapper[4813]: I1007 20:27:06.691763 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.913693 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 20:27:09 crc kubenswrapper[4813]: E1007 20:27:09.914853 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f77f46-6b51-42fc-866f-83404ce31848" containerName="extract-content" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.914870 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f77f46-6b51-42fc-866f-83404ce31848" containerName="extract-content" Oct 07 20:27:09 crc kubenswrapper[4813]: E1007 20:27:09.914894 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f77f46-6b51-42fc-866f-83404ce31848" containerName="extract-utilities" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.914902 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f77f46-6b51-42fc-866f-83404ce31848" containerName="extract-utilities" Oct 07 20:27:09 crc kubenswrapper[4813]: E1007 20:27:09.914918 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f77f46-6b51-42fc-866f-83404ce31848" containerName="registry-server" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.914930 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f77f46-6b51-42fc-866f-83404ce31848" containerName="registry-server" Oct 07 20:27:09 crc kubenswrapper[4813]: E1007 20:27:09.914953 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" containerName="tempest-tests-tempest-tests-runner" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.914962 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" containerName="tempest-tests-tempest-tests-runner" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.915199 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="03f77f46-6b51-42fc-866f-83404ce31848" containerName="registry-server" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.915221 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136" containerName="tempest-tests-tempest-tests-runner" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.915973 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.926230 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-p6n6j" Oct 07 20:27:09 crc kubenswrapper[4813]: I1007 20:27:09.991728 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 20:27:10 crc kubenswrapper[4813]: I1007 20:27:10.049644 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"938f4244-0f47-43a8-af88-c2a117af6d37\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:10 crc kubenswrapper[4813]: I1007 20:27:10.049744 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpbzh\" (UniqueName: \"kubernetes.io/projected/938f4244-0f47-43a8-af88-c2a117af6d37-kube-api-access-xpbzh\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"938f4244-0f47-43a8-af88-c2a117af6d37\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:10 crc kubenswrapper[4813]: I1007 20:27:10.151421 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"938f4244-0f47-43a8-af88-c2a117af6d37\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:10 crc kubenswrapper[4813]: I1007 20:27:10.151512 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpbzh\" (UniqueName: \"kubernetes.io/projected/938f4244-0f47-43a8-af88-c2a117af6d37-kube-api-access-xpbzh\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"938f4244-0f47-43a8-af88-c2a117af6d37\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:10 crc kubenswrapper[4813]: I1007 20:27:10.154676 4813 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"938f4244-0f47-43a8-af88-c2a117af6d37\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:11 crc kubenswrapper[4813]: I1007 20:27:11.010853 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpbzh\" (UniqueName: \"kubernetes.io/projected/938f4244-0f47-43a8-af88-c2a117af6d37-kube-api-access-xpbzh\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"938f4244-0f47-43a8-af88-c2a117af6d37\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:11 crc kubenswrapper[4813]: I1007 20:27:11.295974 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"938f4244-0f47-43a8-af88-c2a117af6d37\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:11 crc 
kubenswrapper[4813]: I1007 20:27:11.462771 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 07 20:27:11 crc kubenswrapper[4813]: I1007 20:27:11.951111 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 07 20:27:12 crc kubenswrapper[4813]: I1007 20:27:12.775546 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"938f4244-0f47-43a8-af88-c2a117af6d37","Type":"ContainerStarted","Data":"1cde98eae4bef921905c05b4f6f5795f91f61a82fc674a23577ca63743141c3b"} Oct 07 20:27:14 crc kubenswrapper[4813]: I1007 20:27:14.798807 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"938f4244-0f47-43a8-af88-c2a117af6d37","Type":"ContainerStarted","Data":"bd57895782f6081a94ef682f6ed77e17cef0dafb7d54d264a177d7190c35ea18"} Oct 07 20:27:14 crc kubenswrapper[4813]: I1007 20:27:14.820040 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=4.097780197 podStartE2EDuration="5.820019407s" podCreationTimestamp="2025-10-07 20:27:09 +0000 UTC" firstStartedPulling="2025-10-07 20:27:11.957644701 +0000 UTC m=+4158.035900322" lastFinishedPulling="2025-10-07 20:27:13.679883921 +0000 UTC m=+4159.758139532" observedRunningTime="2025-10-07 20:27:14.815385404 +0000 UTC m=+4160.893641025" watchObservedRunningTime="2025-10-07 20:27:14.820019407 +0000 UTC m=+4160.898275028" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.459672 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9xl6b/must-gather-h7j48"] Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.461584 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xl6b/must-gather-h7j48" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.471310 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9xl6b/must-gather-h7j48"] Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.502147 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9xl6b"/"default-dockercfg-wckn6" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.504417 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9xl6b"/"openshift-service-ca.crt" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.505723 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9xl6b"/"kube-root-ca.crt" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.618912 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/631e40ec-babd-47ce-84e3-971caf0bbfdf-must-gather-output\") pod \"must-gather-h7j48\" (UID: \"631e40ec-babd-47ce-84e3-971caf0bbfdf\") " pod="openshift-must-gather-9xl6b/must-gather-h7j48" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.618970 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wfjb\" (UniqueName: \"kubernetes.io/projected/631e40ec-babd-47ce-84e3-971caf0bbfdf-kube-api-access-8wfjb\") pod \"must-gather-h7j48\" (UID: \"631e40ec-babd-47ce-84e3-971caf0bbfdf\") " pod="openshift-must-gather-9xl6b/must-gather-h7j48" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.720206 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/631e40ec-babd-47ce-84e3-971caf0bbfdf-must-gather-output\") pod \"must-gather-h7j48\" (UID: \"631e40ec-babd-47ce-84e3-971caf0bbfdf\") " pod="openshift-must-gather-9xl6b/must-gather-h7j48" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.720261 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wfjb\" (UniqueName: \"kubernetes.io/projected/631e40ec-babd-47ce-84e3-971caf0bbfdf-kube-api-access-8wfjb\") pod \"must-gather-h7j48\" (UID: \"631e40ec-babd-47ce-84e3-971caf0bbfdf\") " pod="openshift-must-gather-9xl6b/must-gather-h7j48" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.721668 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/631e40ec-babd-47ce-84e3-971caf0bbfdf-must-gather-output\") pod \"must-gather-h7j48\" (UID: \"631e40ec-babd-47ce-84e3-971caf0bbfdf\") " pod="openshift-must-gather-9xl6b/must-gather-h7j48" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.739043 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wfjb\" (UniqueName: \"kubernetes.io/projected/631e40ec-babd-47ce-84e3-971caf0bbfdf-kube-api-access-8wfjb\") pod \"must-gather-h7j48\" (UID: \"631e40ec-babd-47ce-84e3-971caf0bbfdf\") " pod="openshift-must-gather-9xl6b/must-gather-h7j48" Oct 07 20:27:29 crc kubenswrapper[4813]: I1007 20:27:29.812459 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xl6b/must-gather-h7j48" Oct 07 20:27:30 crc kubenswrapper[4813]: I1007 20:27:30.292080 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9xl6b/must-gather-h7j48"] Oct 07 20:27:30 crc kubenswrapper[4813]: I1007 20:27:30.306553 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 20:27:31 crc kubenswrapper[4813]: I1007 20:27:31.003714 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/must-gather-h7j48" event={"ID":"631e40ec-babd-47ce-84e3-971caf0bbfdf","Type":"ContainerStarted","Data":"384c11dd0feab52919964226a9adc707b1b7c3f052eaef64a7fb902d712b68b9"} Oct 07 20:27:35 crc kubenswrapper[4813]: I1007 20:27:35.043921 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/must-gather-h7j48" event={"ID":"631e40ec-babd-47ce-84e3-971caf0bbfdf","Type":"ContainerStarted","Data":"7d250d368f38b7bdbafb45773ee82814946e5c0c8c582c069992378b6a5bc61c"} Oct 07 20:27:36 crc kubenswrapper[4813]: I1007 20:27:36.055284 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/must-gather-h7j48" event={"ID":"631e40ec-babd-47ce-84e3-971caf0bbfdf","Type":"ContainerStarted","Data":"af270c345d3f75ab5726427ddbfaae04cb1c5d2b2a87e3fc6be05874a5e19c96"} Oct 07 20:27:36 crc kubenswrapper[4813]: I1007 20:27:36.076171 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9xl6b/must-gather-h7j48" podStartSLOduration=2.904374324 podStartE2EDuration="7.076144881s" podCreationTimestamp="2025-10-07 20:27:29 +0000 UTC" firstStartedPulling="2025-10-07 20:27:30.306180005 +0000 UTC m=+4176.384435626" lastFinishedPulling="2025-10-07 20:27:34.477950562 +0000 UTC m=+4180.556206183" observedRunningTime="2025-10-07 20:27:36.073088853 +0000 UTC m=+4182.151344504" watchObservedRunningTime="2025-10-07 20:27:36.076144881 +0000 UTC m=+4182.154400482" Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.547147 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-wdnjz"] Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.550332 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.672757 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zz56\" (UniqueName: \"kubernetes.io/projected/43f4bcdc-4699-46d3-abf7-1e66a6374bff-kube-api-access-2zz56\") pod \"crc-debug-wdnjz\" (UID: \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\") " pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.672873 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43f4bcdc-4699-46d3-abf7-1e66a6374bff-host\") pod \"crc-debug-wdnjz\" (UID: \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\") " pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.775897 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zz56\" (UniqueName: \"kubernetes.io/projected/43f4bcdc-4699-46d3-abf7-1e66a6374bff-kube-api-access-2zz56\") pod \"crc-debug-wdnjz\" (UID: \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\") " pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.776983 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43f4bcdc-4699-46d3-abf7-1e66a6374bff-host\") pod \"crc-debug-wdnjz\" (UID: \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\") " pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.777041 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43f4bcdc-4699-46d3-abf7-1e66a6374bff-host\") pod \"crc-debug-wdnjz\" (UID: \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\") " pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.800585 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zz56\" (UniqueName: \"kubernetes.io/projected/43f4bcdc-4699-46d3-abf7-1e66a6374bff-kube-api-access-2zz56\") pod \"crc-debug-wdnjz\" (UID: \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\") " pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" Oct 07 20:27:40 crc kubenswrapper[4813]: I1007 20:27:40.872476 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" Oct 07 20:27:41 crc kubenswrapper[4813]: I1007 20:27:41.126832 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" event={"ID":"43f4bcdc-4699-46d3-abf7-1e66a6374bff","Type":"ContainerStarted","Data":"a74b248c28781e29a48e2f7310f9c4388b6205fbbe33c8e115fa6dc9748c32a2"} Oct 07 20:27:52 crc kubenswrapper[4813]: I1007 20:27:52.296543 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" event={"ID":"43f4bcdc-4699-46d3-abf7-1e66a6374bff","Type":"ContainerStarted","Data":"6938e811096448d54f8482a2f0751a80edc4d254c1f77838ff83b6397dcc33fe"} Oct 07 20:27:52 crc kubenswrapper[4813]: I1007 20:27:52.314093 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" podStartSLOduration=1.785114087 podStartE2EDuration="12.314075441s" podCreationTimestamp="2025-10-07 20:27:40 +0000 UTC" firstStartedPulling="2025-10-07 20:27:40.917700166 +0000 UTC m=+4186.995955777" lastFinishedPulling="2025-10-07 20:27:51.44666152 +0000 UTC m=+4197.524917131" observedRunningTime="2025-10-07 20:27:52.307911294 +0000 UTC m=+4198.386166905" watchObservedRunningTime="2025-10-07 20:27:52.314075441 +0000 UTC m=+4198.392331052" Oct 07 20:28:22 crc kubenswrapper[4813]: I1007 20:28:22.078965 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:28:22 crc kubenswrapper[4813]: I1007 20:28:22.079593 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:28:52 crc kubenswrapper[4813]: I1007 20:28:52.081054 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:28:52 crc kubenswrapper[4813]: I1007 20:28:52.081563 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:29:07 crc kubenswrapper[4813]: I1007 20:29:07.656596 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f688869c6-w96p7_f241042f-7389-4b62-b934-ac5ac321fcbc/barbican-api/0.log" Oct 07 20:29:07 crc kubenswrapper[4813]: I1007 20:29:07.836212 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f688869c6-w96p7_f241042f-7389-4b62-b934-ac5ac321fcbc/barbican-api-log/0.log" Oct 07 20:29:08 crc kubenswrapper[4813]: I1007 20:29:08.382873 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d696dd678-l487w_584974f4-f44d-4f67-b675-9b0fb29be7f3/barbican-keystone-listener-log/0.log" Oct 07 
20:29:08 crc kubenswrapper[4813]: I1007 20:29:08.394150 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d696dd678-l487w_584974f4-f44d-4f67-b675-9b0fb29be7f3/barbican-keystone-listener/0.log" Oct 07 20:29:08 crc kubenswrapper[4813]: I1007 20:29:08.553015 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-664466bb6c-ldqlb_dc06daa5-4a82-4b6c-bc77-2d40de999f15/barbican-worker/0.log" Oct 07 20:29:08 crc kubenswrapper[4813]: I1007 20:29:08.624413 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-664466bb6c-ldqlb_dc06daa5-4a82-4b6c-bc77-2d40de999f15/barbican-worker-log/0.log" Oct 07 20:29:08 crc kubenswrapper[4813]: I1007 20:29:08.830644 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92_f58a4cdc-b5b0-421f-bd28-6c46f3d99af3/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:08 crc kubenswrapper[4813]: I1007 20:29:08.892101 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_e4331b72-d366-4e3d-972d-419bacf0d2f2/ceilometer-central-agent/0.log" Oct 07 20:29:09 crc kubenswrapper[4813]: I1007 20:29:09.067541 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_e4331b72-d366-4e3d-972d-419bacf0d2f2/ceilometer-notification-agent/0.log" Oct 07 20:29:09 crc kubenswrapper[4813]: I1007 20:29:09.096688 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_e4331b72-d366-4e3d-972d-419bacf0d2f2/proxy-httpd/0.log" Oct 07 20:29:09 crc kubenswrapper[4813]: I1007 20:29:09.129903 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_e4331b72-d366-4e3d-972d-419bacf0d2f2/sg-core/0.log" Oct 07 20:29:09 crc kubenswrapper[4813]: I1007 20:29:09.373148 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ec6f0c69-4799-4be4-b465-19ff21b1f35a/cinder-api-log/0.log" Oct 07 20:29:09 crc kubenswrapper[4813]: I1007 20:29:09.384913 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ec6f0c69-4799-4be4-b465-19ff21b1f35a/cinder-api/0.log" Oct 07 20:29:09 crc kubenswrapper[4813]: I1007 20:29:09.894577 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bad7f43d-8146-46b1-a2d4-9c4a23cd4377/cinder-scheduler/0.log" Oct 07 20:29:09 crc kubenswrapper[4813]: I1007 20:29:09.905447 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bad7f43d-8146-46b1-a2d4-9c4a23cd4377/probe/0.log" Oct 07 20:29:10 crc kubenswrapper[4813]: I1007 20:29:10.212450 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-6whsv_a6b4cff6-9f92-484a-a556-d7b95dcf455f/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:10 crc kubenswrapper[4813]: I1007 20:29:10.467863 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn_5eda0149-d966-4253-9bb0-0bddbaaa29f1/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:10 crc kubenswrapper[4813]: I1007 20:29:10.662986 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh_b27c1155-4bc4-4d5d-b782-418c675819d6/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 
20:29:10 crc kubenswrapper[4813]: I1007 20:29:10.930602 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bc556cf6f-lzpcx_985bc25a-aeea-4538-bbfe-e2461641e594/init/0.log" Oct 07 20:29:11 crc kubenswrapper[4813]: I1007 20:29:11.254283 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bc556cf6f-lzpcx_985bc25a-aeea-4538-bbfe-e2461641e594/init/0.log" Oct 07 20:29:11 crc kubenswrapper[4813]: I1007 20:29:11.266155 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bc556cf6f-lzpcx_985bc25a-aeea-4538-bbfe-e2461641e594/dnsmasq-dns/0.log" Oct 07 20:29:11 crc kubenswrapper[4813]: I1007 20:29:11.472997 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn_4713cec2-7e5d-4d1b-8436-1cd44794b936/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:11 crc kubenswrapper[4813]: I1007 20:29:11.815231 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e7153372-cc47-4ff1-8481-b04a58c5c587/glance-httpd/0.log" Oct 07 20:29:11 crc kubenswrapper[4813]: I1007 20:29:11.910560 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e7153372-cc47-4ff1-8481-b04a58c5c587/glance-log/0.log" Oct 07 20:29:12 crc kubenswrapper[4813]: I1007 20:29:12.070699 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_84cd7775-d255-44d6-a361-0fd247bb406d/glance-httpd/0.log" Oct 07 20:29:12 crc kubenswrapper[4813]: I1007 20:29:12.082959 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_84cd7775-d255-44d6-a361-0fd247bb406d/glance-log/0.log" Oct 07 20:29:12 crc kubenswrapper[4813]: I1007 20:29:12.239964 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-687ddb5b-lwwn2_a0b0d403-9a0c-407b-a3d4-a0db3e612092/horizon/1.log" Oct 07 20:29:12 crc kubenswrapper[4813]: I1007 20:29:12.441533 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-687ddb5b-lwwn2_a0b0d403-9a0c-407b-a3d4-a0db3e612092/horizon/0.log" Oct 07 20:29:12 crc kubenswrapper[4813]: I1007 20:29:12.577896 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5_4edb32ce-3490-4665-8fde-69010044b237/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:12 crc kubenswrapper[4813]: I1007 20:29:12.682785 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-687ddb5b-lwwn2_a0b0d403-9a0c-407b-a3d4-a0db3e612092/horizon-log/0.log" Oct 07 20:29:12 crc kubenswrapper[4813]: I1007 20:29:12.785278 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-25k9r_b7876782-6cc3-47e2-ab62-b9082196a5c8/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:13 crc kubenswrapper[4813]: I1007 20:29:13.027287 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29331121-7mg5t_f060a35b-a8f6-4392-82bf-9e557928512c/keystone-cron/0.log" Oct 07 20:29:13 crc kubenswrapper[4813]: I1007 20:29:13.298894 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-68ff4bb5b-nhpkd_dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3/keystone-api/0.log" Oct 07 20:29:13 crc kubenswrapper[4813]: I1007 20:29:13.300815 4813 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_ba18b055-6c70-4c3c-b464-8138c86bc3ea/kube-state-metrics/0.log" Oct 07 20:29:13 crc kubenswrapper[4813]: I1007 20:29:13.468114 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-4dn96_a4b24290-359e-4973-bf65-53ca4889870d/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:14 crc kubenswrapper[4813]: I1007 20:29:14.240428 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5496dd8845-nwmf5_013c7ce7-ad1b-4f61-920b-f5c5f685dcd7/neutron-httpd/0.log" Oct 07 20:29:14 crc kubenswrapper[4813]: I1007 20:29:14.309062 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl_d4674843-15aa-4490-a878-bc2853b4457b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:14 crc kubenswrapper[4813]: I1007 20:29:14.389804 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5496dd8845-nwmf5_013c7ce7-ad1b-4f61-920b-f5c5f685dcd7/neutron-api/0.log" Oct 07 20:29:15 crc kubenswrapper[4813]: I1007 20:29:15.373620 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1/nova-cell0-conductor-conductor/0.log" Oct 07 20:29:15 crc kubenswrapper[4813]: I1007 20:29:15.815221 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_efa771aa-3427-4b7e-b8a8-775222785447/nova-api-log/0.log" Oct 07 20:29:15 crc kubenswrapper[4813]: I1007 20:29:15.979059 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_efa771aa-3427-4b7e-b8a8-775222785447/nova-api-api/0.log" Oct 07 20:29:15 crc kubenswrapper[4813]: I1007 20:29:15.992794 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_2536ca60-1d39-40b9-a15b-708804ec9fa5/nova-cell1-conductor-conductor/0.log" Oct 07 20:29:16 crc kubenswrapper[4813]: I1007 20:29:16.266363 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_bed232f8-c7a0-446c-8667-0fb3afda3343/nova-cell1-novncproxy-novncproxy/0.log" Oct 07 20:29:17 crc kubenswrapper[4813]: I1007 20:29:17.085060 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-ltrnt_658e18b5-93de-4f7b-962b-fcc403470a2c/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:17 crc kubenswrapper[4813]: I1007 20:29:17.198723 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_ee05e116-f577-4638-8c15-6fb6ff348eaf/nova-metadata-log/0.log" Oct 07 20:29:17 crc kubenswrapper[4813]: I1007 20:29:17.978195 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e032c1c7-6f6c-4265-9320-0500b815ec64/nova-scheduler-scheduler/0.log" Oct 07 20:29:18 crc kubenswrapper[4813]: I1007 20:29:18.472883 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1a2d18a4-7c93-4743-8f3d-3367a4dd937a/mysql-bootstrap/0.log" Oct 07 20:29:18 crc kubenswrapper[4813]: I1007 20:29:18.613597 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1a2d18a4-7c93-4743-8f3d-3367a4dd937a/mysql-bootstrap/0.log" Oct 07 20:29:18 crc kubenswrapper[4813]: I1007 20:29:18.711429 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_1a2d18a4-7c93-4743-8f3d-3367a4dd937a/galera/0.log" Oct 07 20:29:18 crc kubenswrapper[4813]: I1007 20:29:18.788056 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_ee05e116-f577-4638-8c15-6fb6ff348eaf/nova-metadata-metadata/0.log" Oct 07 20:29:18 crc kubenswrapper[4813]: I1007 20:29:18.947002 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b/mysql-bootstrap/0.log" Oct 07 20:29:19 crc kubenswrapper[4813]: I1007 20:29:19.262484 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b/mysql-bootstrap/0.log" Oct 07 20:29:19 crc kubenswrapper[4813]: I1007 20:29:19.286387 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b/galera/0.log" Oct 07 20:29:19 crc kubenswrapper[4813]: I1007 20:29:19.496857 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_00a18181-39b8-42bc-8cc9-4518c7a16137/openstackclient/0.log" Oct 07 20:29:19 crc kubenswrapper[4813]: I1007 20:29:19.585743 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jd55f_a162a130-6094-42c0-a3d1-489de4a7fac4/ovn-controller/0.log" Oct 07 20:29:19 crc kubenswrapper[4813]: I1007 20:29:19.778259 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ww8fx_1276e9fd-662d-41f1-8c9d-05abbbfbf0a2/openstack-network-exporter/0.log" Oct 07 20:29:20 crc kubenswrapper[4813]: I1007 20:29:20.069170 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dmq4j_47f8d464-3eaa-4ee5-ae74-c6339710ade0/ovsdb-server-init/0.log" Oct 07 20:29:20 crc kubenswrapper[4813]: I1007 20:29:20.361993 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dmq4j_47f8d464-3eaa-4ee5-ae74-c6339710ade0/ovsdb-server/0.log" Oct 07 20:29:20 crc kubenswrapper[4813]: I1007 20:29:20.370152 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dmq4j_47f8d464-3eaa-4ee5-ae74-c6339710ade0/ovsdb-server-init/0.log" Oct 07 20:29:20 crc kubenswrapper[4813]: I1007 20:29:20.433721 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dmq4j_47f8d464-3eaa-4ee5-ae74-c6339710ade0/ovs-vswitchd/0.log" Oct 07 20:29:20 crc kubenswrapper[4813]: I1007 20:29:20.698497 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-96t9v_adaa6c4f-3899-4644-acb5-81f67417971e/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:20 crc kubenswrapper[4813]: I1007 20:29:20.828285 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d09b3567-cc2a-48cc-b1ea-b0c65fee032d/openstack-network-exporter/0.log" Oct 07 20:29:20 crc kubenswrapper[4813]: I1007 20:29:20.977810 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d09b3567-cc2a-48cc-b1ea-b0c65fee032d/ovn-northd/0.log" Oct 07 20:29:21 crc kubenswrapper[4813]: I1007 20:29:21.104280 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b6f3d5f7-8af0-4f42-ae53-bc7473860346/openstack-network-exporter/0.log" Oct 07 20:29:21 crc kubenswrapper[4813]: I1007 20:29:21.256571 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_b6f3d5f7-8af0-4f42-ae53-bc7473860346/ovsdbserver-nb/0.log" Oct 07 20:29:21 crc kubenswrapper[4813]: I1007 20:29:21.487306 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_55f9cac2-ed84-40f8-8bca-f10c774814f7/openstack-network-exporter/0.log" Oct 07 20:29:21 crc kubenswrapper[4813]: I1007 20:29:21.544827 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_55f9cac2-ed84-40f8-8bca-f10c774814f7/ovsdbserver-sb/0.log" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.062822 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-bd4864b74-5mp8m_83739b1f-81fa-4e83-baea-f75bae3f1ea5/placement-api/0.log" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.078287 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.078368 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.078409 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.079120 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.079166 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" gracePeriod=600 Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.140805 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-bd4864b74-5mp8m_83739b1f-81fa-4e83-baea-f75bae3f1ea5/placement-log/0.log" Oct 07 20:29:22 crc kubenswrapper[4813]: E1007 20:29:22.216879 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.237573 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2412d699-edb6-474b-95da-eb29d703dfd4/setup-container/0.log" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.496955 4813 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2412d699-edb6-474b-95da-eb29d703dfd4/setup-container/0.log" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.513190 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2412d699-edb6-474b-95da-eb29d703dfd4/rabbitmq/0.log" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.781425 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d486108c-7921-4770-81bf-b309787cbf5a/setup-container/0.log" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.973282 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d486108c-7921-4770-81bf-b309787cbf5a/setup-container/0.log" Oct 07 20:29:22 crc kubenswrapper[4813]: I1007 20:29:22.992096 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d486108c-7921-4770-81bf-b309787cbf5a/rabbitmq/0.log" Oct 07 20:29:23 crc kubenswrapper[4813]: I1007 20:29:23.056045 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" exitCode=0 Oct 07 20:29:23 crc kubenswrapper[4813]: I1007 20:29:23.056084 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"} Oct 07 20:29:23 crc kubenswrapper[4813]: I1007 20:29:23.056119 4813 scope.go:117] "RemoveContainer" containerID="dded469f21a70f38b4d485cd654250ddc8aee28d17a05bf6d7b63ac0babfc8d3" Oct 07 20:29:23 crc kubenswrapper[4813]: I1007 20:29:23.056756 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:29:23 crc kubenswrapper[4813]: E1007 20:29:23.057106 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:29:23 crc kubenswrapper[4813]: I1007 20:29:23.355842 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf_a9b763f6-c95e-4650-8aa4-3f99675f3e48/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:23 crc kubenswrapper[4813]: I1007 20:29:23.446773 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-tdk5g_947e90ca-70e6-4956-a58b-06c3faf10445/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:23 crc kubenswrapper[4813]: I1007 20:29:23.701355 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz_f558bb4b-742a-4c7d-bad0-ce2356b9765c/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:29:23 crc kubenswrapper[4813]: I1007 20:29:23.910367 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-4ntxh_ab40c88e-7fbf-44d6-83a6-0bb6be959120/run-os-edpm-deployment-openstack-edpm-ipam/0.log" 
Oct 07 20:29:24 crc kubenswrapper[4813]: I1007 20:29:24.140881 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-z5kbp_4a506120-df34-41d9-b92a-9e8944c15dcf/ssh-known-hosts-edpm-deployment/0.log"
Oct 07 20:29:24 crc kubenswrapper[4813]: I1007 20:29:24.469782 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65d65664c-r46qm_d3c7d72e-ba30-402f-99f1-aff8e4c688ee/proxy-httpd/0.log"
Oct 07 20:29:24 crc kubenswrapper[4813]: I1007 20:29:24.764173 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65d65664c-r46qm_d3c7d72e-ba30-402f-99f1-aff8e4c688ee/proxy-server/0.log"
Oct 07 20:29:24 crc kubenswrapper[4813]: I1007 20:29:24.974453 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-bj76f_8cee7433-9535-4db0-aa37-e8fc28bdbf94/swift-ring-rebalance/0.log"
Oct 07 20:29:25 crc kubenswrapper[4813]: I1007 20:29:25.246518 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/account-auditor/0.log"
Oct 07 20:29:25 crc kubenswrapper[4813]: I1007 20:29:25.390283 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/account-reaper/0.log"
Oct 07 20:29:25 crc kubenswrapper[4813]: I1007 20:29:25.443073 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/account-replicator/0.log"
Oct 07 20:29:25 crc kubenswrapper[4813]: I1007 20:29:25.567509 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/account-server/0.log"
Oct 07 20:29:25 crc kubenswrapper[4813]: I1007 20:29:25.675529 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/container-auditor/0.log"
Oct 07 20:29:25 crc kubenswrapper[4813]: I1007 20:29:25.704978 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/container-replicator/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.230278 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/container-updater/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.255356 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/container-server/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.308819 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-auditor/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.430965 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-expirer/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.513538 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-replicator/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.584355 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-server/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.715294 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-updater/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.723877 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/rsync/0.log"
Oct 07 20:29:26 crc kubenswrapper[4813]: I1007 20:29:26.927407 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/swift-recon-cron/0.log"
Oct 07 20:29:27 crc kubenswrapper[4813]: I1007 20:29:27.156907 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7_07dc752d-c126-4085-9367-ca8bcee2c1ec/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:29:27 crc kubenswrapper[4813]: I1007 20:29:27.360447 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136/tempest-tests-tempest-tests-runner/0.log"
Oct 07 20:29:27 crc kubenswrapper[4813]: I1007 20:29:27.494826 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_938f4244-0f47-43a8-af88-c2a117af6d37/test-operator-logs-container/0.log"
Oct 07 20:29:27 crc kubenswrapper[4813]: I1007 20:29:27.743967 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-fz2km_6db1d1eb-2150-4a66-bdae-015b651da395/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:29:36 crc kubenswrapper[4813]: I1007 20:29:36.602075 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:29:36 crc kubenswrapper[4813]: E1007 20:29:36.602724 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:29:38 crc kubenswrapper[4813]: I1007 20:29:38.780299 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_d330e133-a612-477b-afbd-2af06b9e084d/memcached/0.log"
Oct 07 20:29:48 crc kubenswrapper[4813]: I1007 20:29:48.603257 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:29:48 crc kubenswrapper[4813]: E1007 20:29:48.604147 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.169253 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"]
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.170924 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.176332 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"]
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.214792 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.215004 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.307868 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-secret-volume\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.308148 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-config-volume\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.308215 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s52lv\" (UniqueName: \"kubernetes.io/projected/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-kube-api-access-s52lv\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.409646 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-secret-volume\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.409724 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-config-volume\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.409773 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s52lv\" (UniqueName: \"kubernetes.io/projected/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-kube-api-access-s52lv\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.411798 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-config-volume\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.426051 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-secret-volume\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.434995 4813 generic.go:334] "Generic (PLEG): container finished" podID="43f4bcdc-4699-46d3-abf7-1e66a6374bff" containerID="6938e811096448d54f8482a2f0751a80edc4d254c1f77838ff83b6397dcc33fe" exitCode=0
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.435038 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-wdnjz" event={"ID":"43f4bcdc-4699-46d3-abf7-1e66a6374bff","Type":"ContainerDied","Data":"6938e811096448d54f8482a2f0751a80edc4d254c1f77838ff83b6397dcc33fe"}
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.435806 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s52lv\" (UniqueName: \"kubernetes.io/projected/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-kube-api-access-s52lv\") pod \"collect-profiles-29331150-dgwwf\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.536248 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:00 crc kubenswrapper[4813]: I1007 20:30:00.602975 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:30:00 crc kubenswrapper[4813]: E1007 20:30:00.603295 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.145677 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"]
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.445271 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf" event={"ID":"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2","Type":"ContainerStarted","Data":"69cbfa765a45b43853ebab3d35d5fbf6b5ca590f37c0038e1d4390fd929106aa"}
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.445318 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf" event={"ID":"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2","Type":"ContainerStarted","Data":"8394c78736ab37d3a8b35b9158a0412853b6e948296058f4553a0dab57f5ca51"}
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.476012 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf" podStartSLOduration=1.475987662 podStartE2EDuration="1.475987662s" podCreationTimestamp="2025-10-07 20:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 20:30:01.461491154 +0000 UTC m=+4327.539746765" watchObservedRunningTime="2025-10-07 20:30:01.475987662 +0000 UTC m=+4327.554243283"
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.526071 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-wdnjz"
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.560244 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-wdnjz"]
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.567709 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-wdnjz"]
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.630235 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zz56\" (UniqueName: \"kubernetes.io/projected/43f4bcdc-4699-46d3-abf7-1e66a6374bff-kube-api-access-2zz56\") pod \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\" (UID: \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\") "
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.630523 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43f4bcdc-4699-46d3-abf7-1e66a6374bff-host\") pod \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\" (UID: \"43f4bcdc-4699-46d3-abf7-1e66a6374bff\") "
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.630662 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43f4bcdc-4699-46d3-abf7-1e66a6374bff-host" (OuterVolumeSpecName: "host") pod "43f4bcdc-4699-46d3-abf7-1e66a6374bff" (UID: "43f4bcdc-4699-46d3-abf7-1e66a6374bff"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.631191 4813 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43f4bcdc-4699-46d3-abf7-1e66a6374bff-host\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.636593 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43f4bcdc-4699-46d3-abf7-1e66a6374bff-kube-api-access-2zz56" (OuterVolumeSpecName: "kube-api-access-2zz56") pod "43f4bcdc-4699-46d3-abf7-1e66a6374bff" (UID: "43f4bcdc-4699-46d3-abf7-1e66a6374bff"). InnerVolumeSpecName "kube-api-access-2zz56". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:30:01 crc kubenswrapper[4813]: I1007 20:30:01.733684 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zz56\" (UniqueName: \"kubernetes.io/projected/43f4bcdc-4699-46d3-abf7-1e66a6374bff-kube-api-access-2zz56\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.458796 4813 generic.go:334] "Generic (PLEG): container finished" podID="4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2" containerID="69cbfa765a45b43853ebab3d35d5fbf6b5ca590f37c0038e1d4390fd929106aa" exitCode=0
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.458896 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf" event={"ID":"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2","Type":"ContainerDied","Data":"69cbfa765a45b43853ebab3d35d5fbf6b5ca590f37c0038e1d4390fd929106aa"}
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.462157 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a74b248c28781e29a48e2f7310f9c4388b6205fbbe33c8e115fa6dc9748c32a2"
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.462253 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-wdnjz"
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.614603 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43f4bcdc-4699-46d3-abf7-1e66a6374bff" path="/var/lib/kubelet/pods/43f4bcdc-4699-46d3-abf7-1e66a6374bff/volumes"
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.886040 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-9w886"]
Oct 07 20:30:02 crc kubenswrapper[4813]: E1007 20:30:02.886588 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43f4bcdc-4699-46d3-abf7-1e66a6374bff" containerName="container-00"
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.886615 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="43f4bcdc-4699-46d3-abf7-1e66a6374bff" containerName="container-00"
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.886856 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="43f4bcdc-4699-46d3-abf7-1e66a6374bff" containerName="container-00"
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.887760 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.960532 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x52pj\" (UniqueName: \"kubernetes.io/projected/53efd6dc-e86f-405e-a901-2012ef12a370-kube-api-access-x52pj\") pod \"crc-debug-9w886\" (UID: \"53efd6dc-e86f-405e-a901-2012ef12a370\") " pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:02 crc kubenswrapper[4813]: I1007 20:30:02.961100 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/53efd6dc-e86f-405e-a901-2012ef12a370-host\") pod \"crc-debug-9w886\" (UID: \"53efd6dc-e86f-405e-a901-2012ef12a370\") " pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.063201 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x52pj\" (UniqueName: \"kubernetes.io/projected/53efd6dc-e86f-405e-a901-2012ef12a370-kube-api-access-x52pj\") pod \"crc-debug-9w886\" (UID: \"53efd6dc-e86f-405e-a901-2012ef12a370\") " pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.063462 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/53efd6dc-e86f-405e-a901-2012ef12a370-host\") pod \"crc-debug-9w886\" (UID: \"53efd6dc-e86f-405e-a901-2012ef12a370\") " pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.063584 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/53efd6dc-e86f-405e-a901-2012ef12a370-host\") pod \"crc-debug-9w886\" (UID: \"53efd6dc-e86f-405e-a901-2012ef12a370\") " pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.086428 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x52pj\" (UniqueName: \"kubernetes.io/projected/53efd6dc-e86f-405e-a901-2012ef12a370-kube-api-access-x52pj\") pod \"crc-debug-9w886\" (UID: \"53efd6dc-e86f-405e-a901-2012ef12a370\") " pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.210687 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.472561 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-9w886" event={"ID":"53efd6dc-e86f-405e-a901-2012ef12a370","Type":"ContainerStarted","Data":"0a18fce0a30f3832a8d10bc5972afefc12257913e1422818d6a18703d944ea84"}
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.473002 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-9w886" event={"ID":"53efd6dc-e86f-405e-a901-2012ef12a370","Type":"ContainerStarted","Data":"7a5fc728a8867bc989bf64484e04ef47907eb179700b3a515d20371d0369ae23"}
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.508792 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9xl6b/crc-debug-9w886" podStartSLOduration=1.508773146 podStartE2EDuration="1.508773146s" podCreationTimestamp="2025-10-07 20:30:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 20:30:03.500448896 +0000 UTC m=+4329.578704517" watchObservedRunningTime="2025-10-07 20:30:03.508773146 +0000 UTC m=+4329.587028757"
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.845353 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.880260 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-secret-volume\") pod \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") "
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.880419 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s52lv\" (UniqueName: \"kubernetes.io/projected/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-kube-api-access-s52lv\") pod \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") "
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.880466 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-config-volume\") pod \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\" (UID: \"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2\") "
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.881359 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-config-volume" (OuterVolumeSpecName: "config-volume") pod "4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2" (UID: "4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.885985 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2" (UID: "4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.889597 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-kube-api-access-s52lv" (OuterVolumeSpecName: "kube-api-access-s52lv") pod "4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2" (UID: "4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2"). InnerVolumeSpecName "kube-api-access-s52lv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.981770 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s52lv\" (UniqueName: \"kubernetes.io/projected/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-kube-api-access-s52lv\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.981990 4813 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-config-volume\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:03 crc kubenswrapper[4813]: I1007 20:30:03.982048 4813 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:04 crc kubenswrapper[4813]: I1007 20:30:04.481288 4813 generic.go:334] "Generic (PLEG): container finished" podID="53efd6dc-e86f-405e-a901-2012ef12a370" containerID="0a18fce0a30f3832a8d10bc5972afefc12257913e1422818d6a18703d944ea84" exitCode=0
Oct 07 20:30:04 crc kubenswrapper[4813]: I1007 20:30:04.481418 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-9w886" event={"ID":"53efd6dc-e86f-405e-a901-2012ef12a370","Type":"ContainerDied","Data":"0a18fce0a30f3832a8d10bc5972afefc12257913e1422818d6a18703d944ea84"}
Oct 07 20:30:04 crc kubenswrapper[4813]: I1007 20:30:04.483063 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf" event={"ID":"4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2","Type":"ContainerDied","Data":"8394c78736ab37d3a8b35b9158a0412853b6e948296058f4553a0dab57f5ca51"}
Oct 07 20:30:04 crc kubenswrapper[4813]: I1007 20:30:04.483096 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8394c78736ab37d3a8b35b9158a0412853b6e948296058f4553a0dab57f5ca51"
Oct 07 20:30:04 crc kubenswrapper[4813]: I1007 20:30:04.483150 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29331150-dgwwf"
Oct 07 20:30:04 crc kubenswrapper[4813]: I1007 20:30:04.544530 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz"]
Oct 07 20:30:04 crc kubenswrapper[4813]: I1007 20:30:04.552690 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29331105-lzfcz"]
Oct 07 20:30:04 crc kubenswrapper[4813]: I1007 20:30:04.615425 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22811987-5993-47b8-88ab-f0665c950567" path="/var/lib/kubelet/pods/22811987-5993-47b8-88ab-f0665c950567/volumes"
Oct 07 20:30:05 crc kubenswrapper[4813]: I1007 20:30:05.590247 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:05 crc kubenswrapper[4813]: I1007 20:30:05.712300 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x52pj\" (UniqueName: \"kubernetes.io/projected/53efd6dc-e86f-405e-a901-2012ef12a370-kube-api-access-x52pj\") pod \"53efd6dc-e86f-405e-a901-2012ef12a370\" (UID: \"53efd6dc-e86f-405e-a901-2012ef12a370\") "
Oct 07 20:30:05 crc kubenswrapper[4813]: I1007 20:30:05.712611 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/53efd6dc-e86f-405e-a901-2012ef12a370-host\") pod \"53efd6dc-e86f-405e-a901-2012ef12a370\" (UID: \"53efd6dc-e86f-405e-a901-2012ef12a370\") "
Oct 07 20:30:05 crc kubenswrapper[4813]: I1007 20:30:05.712668 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/53efd6dc-e86f-405e-a901-2012ef12a370-host" (OuterVolumeSpecName: "host") pod "53efd6dc-e86f-405e-a901-2012ef12a370" (UID: "53efd6dc-e86f-405e-a901-2012ef12a370"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 07 20:30:05 crc kubenswrapper[4813]: I1007 20:30:05.713338 4813 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/53efd6dc-e86f-405e-a901-2012ef12a370-host\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:05 crc kubenswrapper[4813]: I1007 20:30:05.727573 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53efd6dc-e86f-405e-a901-2012ef12a370-kube-api-access-x52pj" (OuterVolumeSpecName: "kube-api-access-x52pj") pod "53efd6dc-e86f-405e-a901-2012ef12a370" (UID: "53efd6dc-e86f-405e-a901-2012ef12a370"). InnerVolumeSpecName "kube-api-access-x52pj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:30:05 crc kubenswrapper[4813]: I1007 20:30:05.814659 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x52pj\" (UniqueName: \"kubernetes.io/projected/53efd6dc-e86f-405e-a901-2012ef12a370-kube-api-access-x52pj\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:06 crc kubenswrapper[4813]: I1007 20:30:06.498828 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-9w886" event={"ID":"53efd6dc-e86f-405e-a901-2012ef12a370","Type":"ContainerDied","Data":"7a5fc728a8867bc989bf64484e04ef47907eb179700b3a515d20371d0369ae23"}
Oct 07 20:30:06 crc kubenswrapper[4813]: I1007 20:30:06.499142 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a5fc728a8867bc989bf64484e04ef47907eb179700b3a515d20371d0369ae23"
Oct 07 20:30:06 crc kubenswrapper[4813]: I1007 20:30:06.499196 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-9w886"
Oct 07 20:30:11 crc kubenswrapper[4813]: I1007 20:30:11.069456 4813 scope.go:117] "RemoveContainer" containerID="9b102e0d5827a20c6f9656818feef83638d1d75ba424efc1dad522e5d5ba2f3b"
Oct 07 20:30:11 crc kubenswrapper[4813]: I1007 20:30:11.076011 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-9w886"]
Oct 07 20:30:11 crc kubenswrapper[4813]: I1007 20:30:11.082940 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-9w886"]
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.327818 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-qhdzp"]
Oct 07 20:30:12 crc kubenswrapper[4813]: E1007 20:30:12.329662 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2" containerName="collect-profiles"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.329879 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2" containerName="collect-profiles"
Oct 07 20:30:12 crc kubenswrapper[4813]: E1007 20:30:12.330018 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53efd6dc-e86f-405e-a901-2012ef12a370" containerName="container-00"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.330140 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="53efd6dc-e86f-405e-a901-2012ef12a370" containerName="container-00"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.330661 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="53efd6dc-e86f-405e-a901-2012ef12a370" containerName="container-00"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.330825 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4255c80b-f4b6-4a3c-ba3c-dd1a22e3c9f2" containerName="collect-profiles"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.331962 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.510986 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s785q\" (UniqueName: \"kubernetes.io/projected/6adcb93c-6154-4f46-a72f-ecbde1275001-kube-api-access-s785q\") pod \"crc-debug-qhdzp\" (UID: \"6adcb93c-6154-4f46-a72f-ecbde1275001\") " pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.511101 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6adcb93c-6154-4f46-a72f-ecbde1275001-host\") pod \"crc-debug-qhdzp\" (UID: \"6adcb93c-6154-4f46-a72f-ecbde1275001\") " pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.613816 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s785q\" (UniqueName: \"kubernetes.io/projected/6adcb93c-6154-4f46-a72f-ecbde1275001-kube-api-access-s785q\") pod \"crc-debug-qhdzp\" (UID: \"6adcb93c-6154-4f46-a72f-ecbde1275001\") " pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.613897 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6adcb93c-6154-4f46-a72f-ecbde1275001-host\") pod \"crc-debug-qhdzp\" (UID: \"6adcb93c-6154-4f46-a72f-ecbde1275001\") " pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.614229 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6adcb93c-6154-4f46-a72f-ecbde1275001-host\") pod \"crc-debug-qhdzp\" (UID: \"6adcb93c-6154-4f46-a72f-ecbde1275001\") " pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.626734 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53efd6dc-e86f-405e-a901-2012ef12a370" path="/var/lib/kubelet/pods/53efd6dc-e86f-405e-a901-2012ef12a370/volumes"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.651566 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s785q\" (UniqueName: \"kubernetes.io/projected/6adcb93c-6154-4f46-a72f-ecbde1275001-kube-api-access-s785q\") pod \"crc-debug-qhdzp\" (UID: \"6adcb93c-6154-4f46-a72f-ecbde1275001\") " pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:12 crc kubenswrapper[4813]: I1007 20:30:12.671792 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:13 crc kubenswrapper[4813]: I1007 20:30:13.583361 4813 generic.go:334] "Generic (PLEG): container finished" podID="6adcb93c-6154-4f46-a72f-ecbde1275001" containerID="3b2546d19bc462746a6b990973e77ccfd0f1449a9e13f5da21838d4c5767c75d" exitCode=0
Oct 07 20:30:13 crc kubenswrapper[4813]: I1007 20:30:13.583520 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-qhdzp" event={"ID":"6adcb93c-6154-4f46-a72f-ecbde1275001","Type":"ContainerDied","Data":"3b2546d19bc462746a6b990973e77ccfd0f1449a9e13f5da21838d4c5767c75d"}
Oct 07 20:30:13 crc kubenswrapper[4813]: I1007 20:30:13.583843 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/crc-debug-qhdzp" event={"ID":"6adcb93c-6154-4f46-a72f-ecbde1275001","Type":"ContainerStarted","Data":"b8d70aa14f54201b17f056441c2c233d4d6a011f0735ea4fa8955d8cc6181e08"}
Oct 07 20:30:13 crc kubenswrapper[4813]: I1007 20:30:13.603667 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:30:13 crc kubenswrapper[4813]: E1007 20:30:13.604425 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:30:13 crc kubenswrapper[4813]: I1007 20:30:13.647041 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-qhdzp"]
Oct 07 20:30:13 crc kubenswrapper[4813]: I1007 20:30:13.658265 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9xl6b/crc-debug-qhdzp"]
Oct 07 20:30:14 crc kubenswrapper[4813]: I1007 20:30:14.701489 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:14 crc kubenswrapper[4813]: I1007 20:30:14.862012 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6adcb93c-6154-4f46-a72f-ecbde1275001-host\") pod \"6adcb93c-6154-4f46-a72f-ecbde1275001\" (UID: \"6adcb93c-6154-4f46-a72f-ecbde1275001\") "
Oct 07 20:30:14 crc kubenswrapper[4813]: I1007 20:30:14.862135 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s785q\" (UniqueName: \"kubernetes.io/projected/6adcb93c-6154-4f46-a72f-ecbde1275001-kube-api-access-s785q\") pod \"6adcb93c-6154-4f46-a72f-ecbde1275001\" (UID: \"6adcb93c-6154-4f46-a72f-ecbde1275001\") "
Oct 07 20:30:14 crc kubenswrapper[4813]: I1007 20:30:14.862150 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6adcb93c-6154-4f46-a72f-ecbde1275001-host" (OuterVolumeSpecName: "host") pod "6adcb93c-6154-4f46-a72f-ecbde1275001" (UID: "6adcb93c-6154-4f46-a72f-ecbde1275001"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 07 20:30:14 crc kubenswrapper[4813]: I1007 20:30:14.862638 4813 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6adcb93c-6154-4f46-a72f-ecbde1275001-host\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:14 crc kubenswrapper[4813]: I1007 20:30:14.867563 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6adcb93c-6154-4f46-a72f-ecbde1275001-kube-api-access-s785q" (OuterVolumeSpecName: "kube-api-access-s785q") pod "6adcb93c-6154-4f46-a72f-ecbde1275001" (UID: "6adcb93c-6154-4f46-a72f-ecbde1275001"). InnerVolumeSpecName "kube-api-access-s785q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:30:14 crc kubenswrapper[4813]: I1007 20:30:14.965383 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s785q\" (UniqueName: \"kubernetes.io/projected/6adcb93c-6154-4f46-a72f-ecbde1275001-kube-api-access-s785q\") on node \"crc\" DevicePath \"\""
Oct 07 20:30:15 crc kubenswrapper[4813]: I1007 20:30:15.610523 4813 scope.go:117] "RemoveContainer" containerID="3b2546d19bc462746a6b990973e77ccfd0f1449a9e13f5da21838d4c5767c75d"
Oct 07 20:30:15 crc kubenswrapper[4813]: I1007 20:30:15.610648 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/crc-debug-qhdzp"
Oct 07 20:30:15 crc kubenswrapper[4813]: I1007 20:30:15.898707 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/util/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.205238 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/pull/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.209456 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/util/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.223868 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/pull/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.402689 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/util/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.446442 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/pull/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.488009 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/extract/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.612356 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6adcb93c-6154-4f46-a72f-ecbde1275001" path="/var/lib/kubelet/pods/6adcb93c-6154-4f46-a72f-ecbde1275001/volumes"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.654063 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-fgdgs_e90691e1-eed5-4c60-af67-46cfca160910/kube-rbac-proxy/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.742615 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-fgdgs_e90691e1-eed5-4c60-af67-46cfca160910/manager/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.759031 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-m49nk_8ff43feb-7984-4f63-b5b4-ab460e72ddc8/kube-rbac-proxy/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.948820 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-ndtgw_145ac332-1c3f-4aec-8438-0c3d36ca2c67/kube-rbac-proxy/0.log"
Oct 07 20:30:16 crc kubenswrapper[4813]: I1007 20:30:16.961108 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-m49nk_8ff43feb-7984-4f63-b5b4-ab460e72ddc8/manager/0.log"
Oct 07 20:30:17 crc kubenswrapper[4813]: I1007 20:30:17.067287 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-ndtgw_145ac332-1c3f-4aec-8438-0c3d36ca2c67/manager/0.log"
Oct 07 20:30:17 crc kubenswrapper[4813]: I1007 20:30:17.154652 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-lk6lc_66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2/kube-rbac-proxy/0.log"
Oct 07 20:30:17 crc kubenswrapper[4813]: I1007 20:30:17.230898 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-lk6lc_66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2/manager/0.log"
Oct 07 20:30:17 crc kubenswrapper[4813]: I1007 20:30:17.620670 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-mtpdz_ff4408c4-9269-43c0-8016-520816b8cd5d/kube-rbac-proxy/0.log"
Oct 07 20:30:17 crc kubenswrapper[4813]: I1007 20:30:17.663183 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-mtpdz_ff4408c4-9269-43c0-8016-520816b8cd5d/manager/0.log"
Oct 07 20:30:17 crc kubenswrapper[4813]: I1007 20:30:17.822401 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-zpbgs_0a182939-eba6-4da5-9e36-567b6a2a37c3/kube-rbac-proxy/0.log"
Oct 07 20:30:17 crc kubenswrapper[4813]: I1007 20:30:17.894580 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-zpbgs_0a182939-eba6-4da5-9e36-567b6a2a37c3/manager/0.log"
Oct 07 20:30:17 crc kubenswrapper[4813]: I1007 20:30:17.897273 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-gngt9_72b45aa2-2bd2-4339-8a89-5a2910798969/kube-rbac-proxy/0.log"
Oct 07 20:30:18 crc kubenswrapper[4813]: I1007 20:30:18.171752 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-gngt9_72b45aa2-2bd2-4339-8a89-5a2910798969/manager/0.log"
Oct 07 20:30:18 crc kubenswrapper[4813]: I1007 20:30:18.269333 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-662qq_0bac4f10-1d47-40aa-b93e-9a0789801e9b/kube-rbac-proxy/0.log"
Oct 07 20:30:18 crc kubenswrapper[4813]: I1007 20:30:18.302124 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-662qq_0bac4f10-1d47-40aa-b93e-9a0789801e9b/manager/0.log"
Oct 07 20:30:18 crc kubenswrapper[4813]: I1007 20:30:18.644776 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-kjl4l_b487945e-823b-4d95-a1dc-6f7148aa053c/manager/0.log"
Oct 07 20:30:18 crc kubenswrapper[4813]: I1007 20:30:18.689154 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-kjl4l_b487945e-823b-4d95-a1dc-6f7148aa053c/kube-rbac-proxy/0.log"
Oct 07 20:30:18 crc kubenswrapper[4813]: I1007 20:30:18.841045 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-v6ggr_eb9b4085-2e2d-4955-bbd3-2c53bcada088/kube-rbac-proxy/0.log"
Oct 07 20:30:18 crc kubenswrapper[4813]: I1007 20:30:18.876731 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-v6ggr_eb9b4085-2e2d-4955-bbd3-2c53bcada088/manager/0.log"
Oct 07 20:30:19 crc kubenswrapper[4813]: I1007 20:30:19.352803 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-6xkm2_ff2bb528-f133-456a-9e91-5f4ef07a4f2f/manager/0.log"
Oct 07 20:30:19 crc kubenswrapper[4813]: I1007 20:30:19.380220 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-6xkm2_ff2bb528-f133-456a-9e91-5f4ef07a4f2f/kube-rbac-proxy/0.log"
Oct 07 20:30:19 crc kubenswrapper[4813]: I1007 20:30:19.542596 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-xzgn5_954d30ae-2fcd-4d29-8d44-a1cf40b56f27/kube-rbac-proxy/0.log"
Oct 07 20:30:19 crc kubenswrapper[4813]: I1007 20:30:19.580094 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-xzgn5_954d30ae-2fcd-4d29-8d44-a1cf40b56f27/manager/0.log"
Oct 07 20:30:19 crc kubenswrapper[4813]: I1007 20:30:19.659842 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-7m9t5_8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8/kube-rbac-proxy/0.log"
Oct 07 20:30:19 crc kubenswrapper[4813]: I1007 20:30:19.826308 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-7m9t5_8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8/manager/0.log"
Oct 07 20:30:19 crc kubenswrapper[4813]: I1007 20:30:19.904264 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-x9rm6_6ae873de-e4da-48cc-9c55-143f61cdf190/kube-rbac-proxy/0.log"
Oct 07 20:30:19 crc kubenswrapper[4813]: I1007 20:30:19.915420 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-x9rm6_6ae873de-e4da-48cc-9c55-143f61cdf190/manager/0.log"
Oct 07 20:30:20 crc kubenswrapper[4813]: I1007 20:30:20.056470 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q_d39bfd53-3ae2-4fe1-a07e-9592be7062b6/kube-rbac-proxy/0.log"
Oct 07 20:30:20 crc kubenswrapper[4813]: I1007 20:30:20.098698 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q_d39bfd53-3ae2-4fe1-a07e-9592be7062b6/manager/0.log"
Oct 07 20:30:20 crc kubenswrapper[4813]: I1007 20:30:20.121376 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6589b7f7cf-b6lcq_28f57161-1102-46a9-99a0-67fc1fc2ca33/kube-rbac-proxy/0.log"
Oct 07 20:30:20 crc kubenswrapper[4813]: I1007 20:30:20.406914 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-765cf949f-6sxnv_e0480957-44d9-4dcf-915f-ba4db55ad450/kube-rbac-proxy/0.log"
Oct 07 20:30:20 crc kubenswrapper[4813]: I1007 20:30:20.663370 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-765cf949f-6sxnv_e0480957-44d9-4dcf-915f-ba4db55ad450/operator/0.log"
Oct 07 20:30:20 crc kubenswrapper[4813]: I1007 20:30:20.665649 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-r7css_ec9bfed9-1014-4ae2-ad89-b1815b613369/registry-server/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.025950 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f96f8c84-vv8xc_155009c1-92c2-493c-8969-12710fed4ec0/kube-rbac-proxy/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.094197 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f96f8c84-vv8xc_155009c1-92c2-493c-8969-12710fed4ec0/manager/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.285989 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6589b7f7cf-b6lcq_28f57161-1102-46a9-99a0-67fc1fc2ca33/manager/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.376851 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-rhpjg_60dd68e0-dc15-4515-aab8-91f2cbd44487/kube-rbac-proxy/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.401104 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-rhpjg_60dd68e0-dc15-4515-aab8-91f2cbd44487/manager/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.549700 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5_58a86259-bcad-428f-9d1d-5e8c059403a8/operator/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.649578 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-j2xrx_7f9b49af-fca0-48b3-8291-db67e1597599/kube-rbac-proxy/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.716585 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-j2xrx_7f9b49af-fca0-48b3-8291-db67e1597599/manager/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.787042 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-775776c574-s879n_edbe78c4-559a-4296-a16d-37c92634c84f/kube-rbac-proxy/0.log"
Oct 07 20:30:21 crc kubenswrapper[4813]: I1007 20:30:21.950283 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-775776c574-s879n_edbe78c4-559a-4296-a16d-37c92634c84f/manager/0.log"
Oct 07 20:30:22 crc kubenswrapper[4813]: I1007 20:30:22.003307 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-74665f6cdc-5p9hd_ceb59888-cd38-4300-93ea-d8f00d0b3b6c/kube-rbac-proxy/0.log"
Oct 07 20:30:22 crc kubenswrapper[4813]: I1007 20:30:22.048590 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-74665f6cdc-5p9hd_ceb59888-cd38-4300-93ea-d8f00d0b3b6c/manager/0.log"
Oct 07 20:30:22 crc kubenswrapper[4813]: I1007 20:30:22.166501 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5dd4499c96-nhkpb_4ced110c-65fb-4a77-aa0a-1a999a911ec1/kube-rbac-proxy/0.log"
Oct 07 20:30:22 crc kubenswrapper[4813]: I1007 20:30:22.246028 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5dd4499c96-nhkpb_4ced110c-65fb-4a77-aa0a-1a999a911ec1/manager/0.log"
Oct 07 20:30:27 crc kubenswrapper[4813]: I1007 20:30:27.602415 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:30:27 crc kubenswrapper[4813]: E1007 20:30:27.603283 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:30:38 crc kubenswrapper[4813]: I1007 20:30:38.602486 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:30:38 crc kubenswrapper[4813]: E1007 20:30:38.604078 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:30:39 crc kubenswrapper[4813]: I1007 20:30:39.711901 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-cpgrb_93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2/control-plane-machine-set-operator/0.log"
Oct 07 20:30:39 crc kubenswrapper[4813]: I1007 20:30:39.838692 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9crzl_ca47c43c-9e61-4697-b7f5-7cec65e2c992/kube-rbac-proxy/0.log"
Oct 07
20:30:39 crc kubenswrapper[4813]: I1007 20:30:39.882437 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9crzl_ca47c43c-9e61-4697-b7f5-7cec65e2c992/machine-api-operator/0.log" Oct 07 20:30:50 crc kubenswrapper[4813]: I1007 20:30:50.603564 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:30:50 crc kubenswrapper[4813]: E1007 20:30:50.604579 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:30:52 crc kubenswrapper[4813]: I1007 20:30:52.826167 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-7lwr4_c1b0f2ad-748f-4212-809f-9e5d658608e5/cert-manager-controller/0.log" Oct 07 20:30:52 crc kubenswrapper[4813]: I1007 20:30:52.889055 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-k7vkh_13884ec5-d712-4cd6-86d3-b1e6059b5fb7/cert-manager-cainjector/0.log" Oct 07 20:30:53 crc kubenswrapper[4813]: I1007 20:30:53.028256 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-5mt2d_1c7224a1-2e4f-4cc4-a127-3791d5c68f6b/cert-manager-webhook/0.log" Oct 07 20:31:04 crc kubenswrapper[4813]: I1007 20:31:04.607809 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:31:04 crc kubenswrapper[4813]: E1007 20:31:04.608478 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:31:07 crc kubenswrapper[4813]: I1007 20:31:07.643570 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-2fdzf_56036d68-a088-4f16-8fce-0c11b7c9c4e3/nmstate-console-plugin/0.log" Oct 07 20:31:08 crc kubenswrapper[4813]: I1007 20:31:08.507763 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-bw2mb_4fe83b69-4076-411a-b34e-fd61c901eb03/nmstate-handler/0.log" Oct 07 20:31:08 crc kubenswrapper[4813]: I1007 20:31:08.534298 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-7mqw5_3525607e-5512-4d19-a0ce-42df574e763a/nmstate-metrics/0.log" Oct 07 20:31:08 crc kubenswrapper[4813]: I1007 20:31:08.571766 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-7mqw5_3525607e-5512-4d19-a0ce-42df574e763a/kube-rbac-proxy/0.log" Oct 07 20:31:08 crc kubenswrapper[4813]: I1007 20:31:08.755911 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-sr7d4_d66b41fa-f25e-4dd5-8f30-f496940d7d19/nmstate-webhook/0.log" Oct 07 20:31:08 crc kubenswrapper[4813]: 
I1007 20:31:08.799610 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-vx5d8_8350f82b-1b55-4571-83a0-14a18f238c51/nmstate-operator/0.log" Oct 07 20:31:17 crc kubenswrapper[4813]: I1007 20:31:17.603064 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:31:17 crc kubenswrapper[4813]: E1007 20:31:17.603966 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.374801 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-w6bx6_ece86cbe-2002-4e30-bedb-56f9631f5726/kube-rbac-proxy/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.477258 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-w6bx6_ece86cbe-2002-4e30-bedb-56f9631f5726/controller/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.559630 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-frr-files/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.759791 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-frr-files/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.775213 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-metrics/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.780287 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-reloader/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.802697 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-reloader/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.966994 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-frr-files/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.972624 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-reloader/0.log" Oct 07 20:31:24 crc kubenswrapper[4813]: I1007 20:31:24.979267 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-metrics/0.log" Oct 07 20:31:25 crc kubenswrapper[4813]: I1007 20:31:25.024411 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-metrics/0.log" Oct 07 20:31:25 crc kubenswrapper[4813]: I1007 20:31:25.196547 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-frr-files/0.log" Oct 07 20:31:25 crc kubenswrapper[4813]: I1007 20:31:25.250727 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/controller/0.log" Oct 07 20:31:25 crc kubenswrapper[4813]: I1007 20:31:25.281884 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-metrics/0.log" Oct 07 20:31:25 crc kubenswrapper[4813]: I1007 20:31:25.286405 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-reloader/0.log" Oct 07 20:31:25 crc kubenswrapper[4813]: I1007 20:31:25.414791 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/frr-metrics/0.log" Oct 07 20:31:25 crc kubenswrapper[4813]: I1007 20:31:25.443806 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/kube-rbac-proxy/0.log" Oct 07 20:31:25 crc kubenswrapper[4813]: I1007 20:31:25.500199 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/kube-rbac-proxy-frr/0.log" Oct 07 20:31:26 crc kubenswrapper[4813]: I1007 20:31:26.130845 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/reloader/0.log" Oct 07 20:31:26 crc kubenswrapper[4813]: I1007 20:31:26.232496 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-m79zg_d5c7c957-5714-4478-874f-1fe2cc7809af/frr-k8s-webhook-server/0.log" Oct 07 20:31:26 crc kubenswrapper[4813]: I1007 20:31:26.439913 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-66cfc88647-54n75_602ff599-0e30-47a2-a316-75053689d031/manager/0.log" Oct 07 20:31:26 crc kubenswrapper[4813]: I1007 20:31:26.654635 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-d65cbb559-4qksm_dc1f8862-6e00-4d65-a90f-f0db7d23cf42/webhook-server/0.log" Oct 07 20:31:26 crc kubenswrapper[4813]: I1007 20:31:26.746620 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/frr/0.log" Oct 07 20:31:26 crc kubenswrapper[4813]: I1007 20:31:26.832960 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-5qpw7_184d7c42-4069-4dbe-a8e7-613da65cfb62/kube-rbac-proxy/0.log" Oct 07 20:31:27 crc kubenswrapper[4813]: I1007 20:31:27.177141 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-5qpw7_184d7c42-4069-4dbe-a8e7-613da65cfb62/speaker/0.log" Oct 07 20:31:30 crc kubenswrapper[4813]: I1007 20:31:30.602729 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:31:30 crc kubenswrapper[4813]: E1007 20:31:30.603598 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:31:39 crc kubenswrapper[4813]: I1007 20:31:39.797091 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/util/0.log" Oct 07 20:31:39 crc kubenswrapper[4813]: I1007 20:31:39.940936 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/util/0.log" Oct 07 20:31:39 crc kubenswrapper[4813]: I1007 20:31:39.968855 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/pull/0.log" Oct 07 20:31:40 crc kubenswrapper[4813]: I1007 20:31:40.113736 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/pull/0.log" Oct 07 20:31:40 crc kubenswrapper[4813]: I1007 20:31:40.259634 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/extract/0.log" Oct 07 20:31:40 crc kubenswrapper[4813]: I1007 20:31:40.367228 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/pull/0.log" Oct 07 20:31:40 crc kubenswrapper[4813]: I1007 20:31:40.374734 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/util/0.log" Oct 07 20:31:40 crc kubenswrapper[4813]: I1007 20:31:40.708884 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-utilities/0.log" Oct 07 20:31:40 crc kubenswrapper[4813]: I1007 20:31:40.891483 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-content/0.log" Oct 07 20:31:40 crc kubenswrapper[4813]: I1007 20:31:40.937938 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-utilities/0.log" Oct 07 20:31:40 crc kubenswrapper[4813]: I1007 20:31:40.997965 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-content/0.log" Oct 07 20:31:41 crc kubenswrapper[4813]: I1007 20:31:41.183979 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-content/0.log" Oct 07 20:31:41 crc kubenswrapper[4813]: I1007 20:31:41.191763 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-utilities/0.log" Oct 07 20:31:41 crc kubenswrapper[4813]: I1007 20:31:41.629090 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-utilities/0.log" Oct 07 20:31:41 crc kubenswrapper[4813]: I1007 20:31:41.702787 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/registry-server/0.log" Oct 07 20:31:41 crc kubenswrapper[4813]: I1007 20:31:41.726586 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-utilities/0.log" Oct 07 20:31:41 crc kubenswrapper[4813]: I1007 20:31:41.743773 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-content/0.log" Oct 07 20:31:41 crc kubenswrapper[4813]: I1007 20:31:41.856901 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-content/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.033264 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-utilities/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.103074 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-content/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.296475 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/util/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.438841 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/registry-server/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.543944 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/util/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.622869 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/pull/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.670396 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/pull/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.719871 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/pull/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.775915 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/util/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.828942 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/extract/0.log" Oct 07 20:31:42 crc kubenswrapper[4813]: I1007 20:31:42.965985 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-trcm6_54507780-d039-4960-b75e-579f3b0aa7f5/marketplace-operator/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.029337 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-utilities/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.324718 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-utilities/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.362970 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-content/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.374639 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-content/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.583257 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-utilities/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.672095 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-content/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.781092 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/registry-server/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.864213 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-utilities/0.log" Oct 07 20:31:43 crc kubenswrapper[4813]: I1007 20:31:43.995388 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-utilities/0.log" Oct 07 20:31:44 crc kubenswrapper[4813]: I1007 20:31:44.024487 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-content/0.log" Oct 07 20:31:44 crc kubenswrapper[4813]: I1007 20:31:44.040123 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-content/0.log" Oct 07 20:31:44 crc kubenswrapper[4813]: I1007 20:31:44.617641 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:31:44 crc kubenswrapper[4813]: E1007 20:31:44.618034 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:31:44 crc kubenswrapper[4813]: I1007 20:31:44.796581 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-utilities/0.log" Oct 07 20:31:45 crc kubenswrapper[4813]: I1007 20:31:45.237434 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-content/0.log" Oct 07 20:31:45 crc kubenswrapper[4813]: I1007 20:31:45.309943 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/registry-server/0.log" Oct 07 20:31:57 crc kubenswrapper[4813]: I1007 20:31:57.603261 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:31:57 crc kubenswrapper[4813]: E1007 20:31:57.604294 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:32:11 crc kubenswrapper[4813]: I1007 20:32:11.602935 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:32:11 crc kubenswrapper[4813]: E1007 20:32:11.603580 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.577769 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6czxr"] Oct 07 20:32:17 crc kubenswrapper[4813]: E1007 20:32:17.578730 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6adcb93c-6154-4f46-a72f-ecbde1275001" containerName="container-00" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.578744 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6adcb93c-6154-4f46-a72f-ecbde1275001" containerName="container-00" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.578923 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6adcb93c-6154-4f46-a72f-ecbde1275001" containerName="container-00" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.589805 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.599011 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6czxr"] Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.725967 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-utilities\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.726030 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ln2r\" (UniqueName: \"kubernetes.io/projected/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-kube-api-access-9ln2r\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.726062 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-catalog-content\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.827528 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-utilities\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.828113 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ln2r\" (UniqueName: \"kubernetes.io/projected/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-kube-api-access-9ln2r\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.828526 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-catalog-content\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.828062 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-utilities\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.828818 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-catalog-content\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.883467 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9ln2r\" (UniqueName: \"kubernetes.io/projected/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-kube-api-access-9ln2r\") pod \"redhat-operators-6czxr\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:17 crc kubenswrapper[4813]: I1007 20:32:17.937668 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:18 crc kubenswrapper[4813]: I1007 20:32:18.472555 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6czxr"] Oct 07 20:32:18 crc kubenswrapper[4813]: I1007 20:32:18.776492 4813 generic.go:334] "Generic (PLEG): container finished" podID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerID="882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066" exitCode=0 Oct 07 20:32:18 crc kubenswrapper[4813]: I1007 20:32:18.776739 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czxr" event={"ID":"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea","Type":"ContainerDied","Data":"882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066"} Oct 07 20:32:18 crc kubenswrapper[4813]: I1007 20:32:18.776764 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czxr" event={"ID":"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea","Type":"ContainerStarted","Data":"56e6a0f085a9f0edef0c2f75df829c5264bbd4351e001eb48e354a212110497a"} Oct 07 20:32:20 crc kubenswrapper[4813]: I1007 20:32:20.796829 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czxr" event={"ID":"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea","Type":"ContainerStarted","Data":"1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153"} Oct 07 20:32:23 crc kubenswrapper[4813]: I1007 20:32:23.602747 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:32:23 crc kubenswrapper[4813]: E1007 20:32:23.603513 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:32:24 crc kubenswrapper[4813]: I1007 20:32:24.829623 4813 generic.go:334] "Generic (PLEG): container finished" podID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerID="1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153" exitCode=0 Oct 07 20:32:24 crc kubenswrapper[4813]: I1007 20:32:24.829739 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czxr" event={"ID":"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea","Type":"ContainerDied","Data":"1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153"} Oct 07 20:32:25 crc kubenswrapper[4813]: I1007 20:32:25.847019 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czxr" event={"ID":"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea","Type":"ContainerStarted","Data":"82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40"} Oct 07 20:32:25 crc kubenswrapper[4813]: I1007 20:32:25.868235 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-6czxr" podStartSLOduration=2.379645953 podStartE2EDuration="8.868214596s" podCreationTimestamp="2025-10-07 20:32:17 +0000 UTC" firstStartedPulling="2025-10-07 20:32:18.778156312 +0000 UTC m=+4464.856411923" lastFinishedPulling="2025-10-07 20:32:25.266724955 +0000 UTC m=+4471.344980566" observedRunningTime="2025-10-07 20:32:25.863629704 +0000 UTC m=+4471.941885305" watchObservedRunningTime="2025-10-07 20:32:25.868214596 +0000 UTC m=+4471.946470207" Oct 07 20:32:27 crc kubenswrapper[4813]: I1007 20:32:27.938427 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:27 crc kubenswrapper[4813]: I1007 20:32:27.938744 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:29 crc kubenswrapper[4813]: I1007 20:32:29.003829 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6czxr" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="registry-server" probeResult="failure" output=< Oct 07 20:32:29 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s Oct 07 20:32:29 crc kubenswrapper[4813]: > Oct 07 20:32:38 crc kubenswrapper[4813]: I1007 20:32:38.029935 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:38 crc kubenswrapper[4813]: I1007 20:32:38.105218 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:38 crc kubenswrapper[4813]: I1007 20:32:38.280803 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6czxr"] Oct 07 20:32:38 crc kubenswrapper[4813]: I1007 20:32:38.603234 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:32:38 crc kubenswrapper[4813]: E1007 20:32:38.603618 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.007851 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6czxr" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="registry-server" containerID="cri-o://82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40" gracePeriod=2 Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.509482 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.598177 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-utilities\") pod \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.598346 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ln2r\" (UniqueName: \"kubernetes.io/projected/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-kube-api-access-9ln2r\") pod \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.599015 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-utilities" (OuterVolumeSpecName: "utilities") pod "ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" (UID: "ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.599262 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-catalog-content\") pod \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\" (UID: \"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea\") " Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.599676 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.608433 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-kube-api-access-9ln2r" (OuterVolumeSpecName: "kube-api-access-9ln2r") pod "ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" (UID: "ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea"). InnerVolumeSpecName "kube-api-access-9ln2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.681995 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" (UID: "ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.701164 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:32:40 crc kubenswrapper[4813]: I1007 20:32:40.701270 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ln2r\" (UniqueName: \"kubernetes.io/projected/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea-kube-api-access-9ln2r\") on node \"crc\" DevicePath \"\"" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.021506 4813 generic.go:334] "Generic (PLEG): container finished" podID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerID="82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40" exitCode=0 Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.022950 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czxr" event={"ID":"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea","Type":"ContainerDied","Data":"82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40"} Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.023094 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6czxr" event={"ID":"ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea","Type":"ContainerDied","Data":"56e6a0f085a9f0edef0c2f75df829c5264bbd4351e001eb48e354a212110497a"} Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.023204 4813 scope.go:117] "RemoveContainer" containerID="82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.023644 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6czxr" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.078468 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6czxr"] Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.083229 4813 scope.go:117] "RemoveContainer" containerID="1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.088610 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6czxr"] Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.366855 4813 scope.go:117] "RemoveContainer" containerID="882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.402091 4813 scope.go:117] "RemoveContainer" containerID="82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40" Oct 07 20:32:41 crc kubenswrapper[4813]: E1007 20:32:41.404168 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40\": container with ID starting with 82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40 not found: ID does not exist" containerID="82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.404206 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40"} err="failed to get container status \"82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40\": rpc error: code = NotFound desc = could not find container \"82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40\": container with ID starting with 82ac3c40bd2b9fee77267b05039b65e49d835beffd316d42f36b9bc352140b40 not found: ID does not exist" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.404231 4813 scope.go:117] "RemoveContainer" containerID="1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153" Oct 07 20:32:41 crc kubenswrapper[4813]: E1007 20:32:41.411510 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153\": container with ID starting with 1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153 not found: ID does not exist" containerID="1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.411565 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153"} err="failed to get container status \"1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153\": rpc error: code = NotFound desc = could not find container \"1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153\": container with ID starting with 1e56301469ad142cf7aaabbc549c7639b3d7287570be03ea2c6527ed96794153 not found: ID does not exist" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.411596 4813 scope.go:117] "RemoveContainer" containerID="882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066" Oct 07 20:32:41 crc kubenswrapper[4813]: E1007 20:32:41.412786 4813 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066\": container with ID starting with 882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066 not found: ID does not exist" containerID="882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066" Oct 07 20:32:41 crc kubenswrapper[4813]: I1007 20:32:41.412818 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066"} err="failed to get container status \"882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066\": rpc error: code = NotFound desc = could not find container \"882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066\": container with ID starting with 882cd0ef25f69695fb19821572953f8f65633f9b3a025e467544145b36b17066 not found: ID does not exist" Oct 07 20:32:42 crc kubenswrapper[4813]: I1007 20:32:42.619144 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" path="/var/lib/kubelet/pods/ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea/volumes" Oct 07 20:32:53 crc kubenswrapper[4813]: I1007 20:32:53.603273 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:32:53 crc kubenswrapper[4813]: E1007 20:32:53.604383 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:33:04 crc kubenswrapper[4813]: I1007 20:33:04.625670 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:33:04 crc kubenswrapper[4813]: E1007 20:33:04.628897 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:33:19 crc kubenswrapper[4813]: I1007 20:33:19.603174 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:33:19 crc kubenswrapper[4813]: E1007 20:33:19.604250 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:33:34 crc kubenswrapper[4813]: I1007 20:33:34.614055 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:33:34 crc kubenswrapper[4813]: E1007 20:33:34.615019 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.006103 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g2dzh"] Oct 07 20:33:49 crc kubenswrapper[4813]: E1007 20:33:49.007124 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="extract-content" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.007141 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="extract-content" Oct 07 20:33:49 crc kubenswrapper[4813]: E1007 20:33:49.007165 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="extract-utilities" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.007172 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="extract-utilities" Oct 07 20:33:49 crc kubenswrapper[4813]: E1007 20:33:49.007190 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="registry-server" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.007197 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="registry-server" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.007419 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad154f4e-8c0f-4eb7-96a3-7ab53d7d15ea" containerName="registry-server" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.008973 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.026962 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g2dzh"] Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.062463 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-catalog-content\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.062554 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhpxf\" (UniqueName: \"kubernetes.io/projected/7366ed22-fc3e-4a63-a998-5c549541d468-kube-api-access-rhpxf\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.062659 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-utilities\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.163796 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-catalog-content\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.163884 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhpxf\" (UniqueName: \"kubernetes.io/projected/7366ed22-fc3e-4a63-a998-5c549541d468-kube-api-access-rhpxf\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.163965 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-utilities\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.164911 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-catalog-content\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.164968 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-utilities\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.184359 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rhpxf\" (UniqueName: \"kubernetes.io/projected/7366ed22-fc3e-4a63-a998-5c549541d468-kube-api-access-rhpxf\") pod \"community-operators-g2dzh\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.332096 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.614895 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2" Oct 07 20:33:49 crc kubenswrapper[4813]: E1007 20:33:49.615818 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.656038 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g2dzh"] Oct 07 20:33:49 crc kubenswrapper[4813]: I1007 20:33:49.838904 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2dzh" event={"ID":"7366ed22-fc3e-4a63-a998-5c549541d468","Type":"ContainerStarted","Data":"5cc9b8d959a26fc9b34a222e481fa09a8d8fe61a578e31d50db821892e63bbfd"} Oct 07 20:33:50 crc kubenswrapper[4813]: I1007 20:33:50.857903 4813 generic.go:334] "Generic (PLEG): container finished" podID="7366ed22-fc3e-4a63-a998-5c549541d468" containerID="ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5" exitCode=0 Oct 07 20:33:50 crc kubenswrapper[4813]: I1007 20:33:50.858275 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2dzh" event={"ID":"7366ed22-fc3e-4a63-a998-5c549541d468","Type":"ContainerDied","Data":"ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5"} Oct 07 20:33:50 crc kubenswrapper[4813]: I1007 20:33:50.861463 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 07 20:33:51 crc kubenswrapper[4813]: I1007 20:33:51.872977 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2dzh" event={"ID":"7366ed22-fc3e-4a63-a998-5c549541d468","Type":"ContainerStarted","Data":"bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c"} Oct 07 20:33:53 crc kubenswrapper[4813]: I1007 20:33:53.905154 4813 generic.go:334] "Generic (PLEG): container finished" podID="7366ed22-fc3e-4a63-a998-5c549541d468" containerID="bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c" exitCode=0 Oct 07 20:33:53 crc kubenswrapper[4813]: I1007 20:33:53.905571 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2dzh" event={"ID":"7366ed22-fc3e-4a63-a998-5c549541d468","Type":"ContainerDied","Data":"bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c"} Oct 07 20:33:54 crc kubenswrapper[4813]: I1007 20:33:54.917168 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2dzh" 
event={"ID":"7366ed22-fc3e-4a63-a998-5c549541d468","Type":"ContainerStarted","Data":"add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209"} Oct 07 20:33:59 crc kubenswrapper[4813]: I1007 20:33:59.332395 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:59 crc kubenswrapper[4813]: I1007 20:33:59.333101 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:59 crc kubenswrapper[4813]: I1007 20:33:59.419948 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:33:59 crc kubenswrapper[4813]: I1007 20:33:59.451076 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g2dzh" podStartSLOduration=7.960445077 podStartE2EDuration="11.451048943s" podCreationTimestamp="2025-10-07 20:33:48 +0000 UTC" firstStartedPulling="2025-10-07 20:33:50.861104479 +0000 UTC m=+4556.939360100" lastFinishedPulling="2025-10-07 20:33:54.351708345 +0000 UTC m=+4560.429963966" observedRunningTime="2025-10-07 20:33:54.943811514 +0000 UTC m=+4561.022067165" watchObservedRunningTime="2025-10-07 20:33:59.451048943 +0000 UTC m=+4565.529304594" Oct 07 20:34:00 crc kubenswrapper[4813]: I1007 20:34:00.269018 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:34:00 crc kubenswrapper[4813]: I1007 20:34:00.318588 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g2dzh"] Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.001655 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g2dzh" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" containerName="registry-server" containerID="cri-o://add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209" gracePeriod=2 Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.540836 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.577472 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-utilities\") pod \"7366ed22-fc3e-4a63-a998-5c549541d468\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.577555 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-catalog-content\") pod \"7366ed22-fc3e-4a63-a998-5c549541d468\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.577601 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhpxf\" (UniqueName: \"kubernetes.io/projected/7366ed22-fc3e-4a63-a998-5c549541d468-kube-api-access-rhpxf\") pod \"7366ed22-fc3e-4a63-a998-5c549541d468\" (UID: \"7366ed22-fc3e-4a63-a998-5c549541d468\") " Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.585057 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-utilities" (OuterVolumeSpecName: "utilities") pod "7366ed22-fc3e-4a63-a998-5c549541d468" (UID: "7366ed22-fc3e-4a63-a998-5c549541d468"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.611545 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7366ed22-fc3e-4a63-a998-5c549541d468-kube-api-access-rhpxf" (OuterVolumeSpecName: "kube-api-access-rhpxf") pod "7366ed22-fc3e-4a63-a998-5c549541d468" (UID: "7366ed22-fc3e-4a63-a998-5c549541d468"). InnerVolumeSpecName "kube-api-access-rhpxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.680261 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.680298 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhpxf\" (UniqueName: \"kubernetes.io/projected/7366ed22-fc3e-4a63-a998-5c549541d468-kube-api-access-rhpxf\") on node \"crc\" DevicePath \"\"" Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.696779 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7366ed22-fc3e-4a63-a998-5c549541d468" (UID: "7366ed22-fc3e-4a63-a998-5c549541d468"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:34:02 crc kubenswrapper[4813]: I1007 20:34:02.781720 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7366ed22-fc3e-4a63-a998-5c549541d468-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.010916 4813 generic.go:334] "Generic (PLEG): container finished" podID="7366ed22-fc3e-4a63-a998-5c549541d468" containerID="add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209" exitCode=0 Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.010955 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2dzh" event={"ID":"7366ed22-fc3e-4a63-a998-5c549541d468","Type":"ContainerDied","Data":"add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209"} Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.010986 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g2dzh" event={"ID":"7366ed22-fc3e-4a63-a998-5c549541d468","Type":"ContainerDied","Data":"5cc9b8d959a26fc9b34a222e481fa09a8d8fe61a578e31d50db821892e63bbfd"} Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.011003 4813 scope.go:117] "RemoveContainer" containerID="add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209" Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.011090 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g2dzh" Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.031427 4813 scope.go:117] "RemoveContainer" containerID="bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c" Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.075927 4813 scope.go:117] "RemoveContainer" containerID="ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5" Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.084392 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g2dzh"] Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.103308 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g2dzh"] Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.139756 4813 scope.go:117] "RemoveContainer" containerID="add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209" Oct 07 20:34:03 crc kubenswrapper[4813]: E1007 20:34:03.140387 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209\": container with ID starting with add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209 not found: ID does not exist" containerID="add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209" Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.140524 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209"} err="failed to get container status \"add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209\": rpc error: code = NotFound desc = could not find container \"add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209\": container with ID starting with add6f5edea744f871c4125a8cb188adbb562137375bdf096d3fff7a3f6d9e209 not found: ID does not exist" Oct 07 
Oct 07 20:34:03 crc kubenswrapper[4813]: E1007 20:34:03.141227 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c\": container with ID starting with bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c not found: ID does not exist" containerID="bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c"
Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.141261 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c"} err="failed to get container status \"bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c\": rpc error: code = NotFound desc = could not find container \"bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c\": container with ID starting with bff32dfa64dc05ed6a0afbaac3aed9d3ab890c4842129f4456984122333ad35c not found: ID does not exist"
Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.141282 4813 scope.go:117] "RemoveContainer" containerID="ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5"
Oct 07 20:34:03 crc kubenswrapper[4813]: E1007 20:34:03.141607 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5\": container with ID starting with ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5 not found: ID does not exist" containerID="ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5"
Oct 07 20:34:03 crc kubenswrapper[4813]: I1007 20:34:03.141630 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5"} err="failed to get container status \"ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5\": rpc error: code = NotFound desc = could not find container \"ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5\": container with ID starting with ed96ceccb5ef5dc18066f3af05f3d2a4bee5bb663a1d2650af922abfc4d525c5 not found: ID does not exist"
Oct 07 20:34:04 crc kubenswrapper[4813]: I1007 20:34:04.619833 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" path="/var/lib/kubelet/pods/7366ed22-fc3e-4a63-a998-5c549541d468/volumes"
Oct 07 20:34:04 crc kubenswrapper[4813]: I1007 20:34:04.623691 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:34:04 crc kubenswrapper[4813]: E1007 20:34:04.624279 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:34:11 crc kubenswrapper[4813]: I1007 20:34:11.267297 4813 scope.go:117] "RemoveContainer" containerID="6938e811096448d54f8482a2f0751a80edc4d254c1f77838ff83b6397dcc33fe"
Oct 07 20:34:15 crc kubenswrapper[4813]: I1007 20:34:15.603585 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:34:15 crc kubenswrapper[4813]: E1007 20:34:15.604975 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:34:17 crc kubenswrapper[4813]: I1007 20:34:17.253957 4813 generic.go:334] "Generic (PLEG): container finished" podID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerID="7d250d368f38b7bdbafb45773ee82814946e5c0c8c582c069992378b6a5bc61c" exitCode=0
Oct 07 20:34:17 crc kubenswrapper[4813]: I1007 20:34:17.254044 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xl6b/must-gather-h7j48" event={"ID":"631e40ec-babd-47ce-84e3-971caf0bbfdf","Type":"ContainerDied","Data":"7d250d368f38b7bdbafb45773ee82814946e5c0c8c582c069992378b6a5bc61c"}
Oct 07 20:34:17 crc kubenswrapper[4813]: I1007 20:34:17.255015 4813 scope.go:117] "RemoveContainer" containerID="7d250d368f38b7bdbafb45773ee82814946e5c0c8c582c069992378b6a5bc61c"
Oct 07 20:34:18 crc kubenswrapper[4813]: I1007 20:34:18.215936 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9xl6b_must-gather-h7j48_631e40ec-babd-47ce-84e3-971caf0bbfdf/gather/0.log"
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.128475 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9xl6b/must-gather-h7j48"]
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.129487 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9xl6b/must-gather-h7j48" podUID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerName="copy" containerID="cri-o://af270c345d3f75ab5726427ddbfaae04cb1c5d2b2a87e3fc6be05874a5e19c96" gracePeriod=2
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.139757 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9xl6b/must-gather-h7j48"]
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.390215 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9xl6b_must-gather-h7j48_631e40ec-babd-47ce-84e3-971caf0bbfdf/copy/0.log"
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.390729 4813 generic.go:334] "Generic (PLEG): container finished" podID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerID="af270c345d3f75ab5726427ddbfaae04cb1c5d2b2a87e3fc6be05874a5e19c96" exitCode=143
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.569698 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9xl6b_must-gather-h7j48_631e40ec-babd-47ce-84e3-971caf0bbfdf/copy/0.log"
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.570180 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/must-gather-h7j48"
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.641264 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/631e40ec-babd-47ce-84e3-971caf0bbfdf-must-gather-output\") pod \"631e40ec-babd-47ce-84e3-971caf0bbfdf\" (UID: \"631e40ec-babd-47ce-84e3-971caf0bbfdf\") "
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.641654 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wfjb\" (UniqueName: \"kubernetes.io/projected/631e40ec-babd-47ce-84e3-971caf0bbfdf-kube-api-access-8wfjb\") pod \"631e40ec-babd-47ce-84e3-971caf0bbfdf\" (UID: \"631e40ec-babd-47ce-84e3-971caf0bbfdf\") "
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.667921 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/631e40ec-babd-47ce-84e3-971caf0bbfdf-kube-api-access-8wfjb" (OuterVolumeSpecName: "kube-api-access-8wfjb") pod "631e40ec-babd-47ce-84e3-971caf0bbfdf" (UID: "631e40ec-babd-47ce-84e3-971caf0bbfdf"). InnerVolumeSpecName "kube-api-access-8wfjb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.744333 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wfjb\" (UniqueName: \"kubernetes.io/projected/631e40ec-babd-47ce-84e3-971caf0bbfdf-kube-api-access-8wfjb\") on node \"crc\" DevicePath \"\""
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.866076 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/631e40ec-babd-47ce-84e3-971caf0bbfdf-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "631e40ec-babd-47ce-84e3-971caf0bbfdf" (UID: "631e40ec-babd-47ce-84e3-971caf0bbfdf"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:34:27 crc kubenswrapper[4813]: I1007 20:34:27.947484 4813 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/631e40ec-babd-47ce-84e3-971caf0bbfdf-must-gather-output\") on node \"crc\" DevicePath \"\""
Oct 07 20:34:28 crc kubenswrapper[4813]: I1007 20:34:28.407874 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9xl6b_must-gather-h7j48_631e40ec-babd-47ce-84e3-971caf0bbfdf/copy/0.log"
Oct 07 20:34:28 crc kubenswrapper[4813]: I1007 20:34:28.408367 4813 scope.go:117] "RemoveContainer" containerID="af270c345d3f75ab5726427ddbfaae04cb1c5d2b2a87e3fc6be05874a5e19c96"
Oct 07 20:34:28 crc kubenswrapper[4813]: I1007 20:34:28.408503 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xl6b/must-gather-h7j48"
Oct 07 20:34:28 crc kubenswrapper[4813]: I1007 20:34:28.434405 4813 scope.go:117] "RemoveContainer" containerID="7d250d368f38b7bdbafb45773ee82814946e5c0c8c582c069992378b6a5bc61c"
Oct 07 20:34:28 crc kubenswrapper[4813]: I1007 20:34:28.613979 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="631e40ec-babd-47ce-84e3-971caf0bbfdf" path="/var/lib/kubelet/pods/631e40ec-babd-47ce-84e3-971caf0bbfdf/volumes"
Oct 07 20:34:29 crc kubenswrapper[4813]: I1007 20:34:29.603298 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:34:30 crc kubenswrapper[4813]: I1007 20:34:30.431589 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"cc892fa0043484caa2f6d2787f88fed7a1e1635a66e9a8e0e51588d6df4363cf"}
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.791109 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jdm8p/must-gather-g9n48"]
Oct 07 20:35:06 crc kubenswrapper[4813]: E1007 20:35:06.791868 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerName="copy"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.791881 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerName="copy"
Oct 07 20:35:06 crc kubenswrapper[4813]: E1007 20:35:06.791913 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" containerName="registry-server"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.791919 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" containerName="registry-server"
Oct 07 20:35:06 crc kubenswrapper[4813]: E1007 20:35:06.791935 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" containerName="extract-content"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.791941 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" containerName="extract-content"
Oct 07 20:35:06 crc kubenswrapper[4813]: E1007 20:35:06.791955 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerName="gather"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.791961 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerName="gather"
Oct 07 20:35:06 crc kubenswrapper[4813]: E1007 20:35:06.791974 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" containerName="extract-utilities"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.791980 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" containerName="extract-utilities"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.792149 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="7366ed22-fc3e-4a63-a998-5c549541d468" containerName="registry-server"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.792161 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerName="copy"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.792180 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="631e40ec-babd-47ce-84e3-971caf0bbfdf" containerName="gather"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.793072 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.795988 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jdm8p"/"openshift-service-ca.crt"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.795994 4813 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jdm8p"/"kube-root-ca.crt"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.834180 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jdm8p/must-gather-g9n48"]
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.905724 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4b27f1b7-5f74-4e67-8a17-5ba84c954120-must-gather-output\") pod \"must-gather-g9n48\" (UID: \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\") " pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:35:06 crc kubenswrapper[4813]: I1007 20:35:06.906129 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp687\" (UniqueName: \"kubernetes.io/projected/4b27f1b7-5f74-4e67-8a17-5ba84c954120-kube-api-access-mp687\") pod \"must-gather-g9n48\" (UID: \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\") " pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:35:07 crc kubenswrapper[4813]: I1007 20:35:07.007492 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp687\" (UniqueName: \"kubernetes.io/projected/4b27f1b7-5f74-4e67-8a17-5ba84c954120-kube-api-access-mp687\") pod \"must-gather-g9n48\" (UID: \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\") " pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:35:07 crc kubenswrapper[4813]: I1007 20:35:07.007856 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4b27f1b7-5f74-4e67-8a17-5ba84c954120-must-gather-output\") pod \"must-gather-g9n48\" (UID: \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\") " pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:35:07 crc kubenswrapper[4813]: I1007 20:35:07.008354 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4b27f1b7-5f74-4e67-8a17-5ba84c954120-must-gather-output\") pod \"must-gather-g9n48\" (UID: \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\") " pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:35:07 crc kubenswrapper[4813]: I1007 20:35:07.024754 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp687\" (UniqueName: \"kubernetes.io/projected/4b27f1b7-5f74-4e67-8a17-5ba84c954120-kube-api-access-mp687\") pod \"must-gather-g9n48\" (UID: \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\") " pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:35:07 crc kubenswrapper[4813]: I1007 20:35:07.113444 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:35:07 crc kubenswrapper[4813]: I1007 20:35:07.601125 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jdm8p/must-gather-g9n48"]
Oct 07 20:35:07 crc kubenswrapper[4813]: I1007 20:35:07.838606 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/must-gather-g9n48" event={"ID":"4b27f1b7-5f74-4e67-8a17-5ba84c954120","Type":"ContainerStarted","Data":"0c07dc12440c2869d159927e09855e8836800643cbb70919d99f3b5805c72340"}
Oct 07 20:35:08 crc kubenswrapper[4813]: I1007 20:35:08.857283 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/must-gather-g9n48" event={"ID":"4b27f1b7-5f74-4e67-8a17-5ba84c954120","Type":"ContainerStarted","Data":"33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0"}
Oct 07 20:35:08 crc kubenswrapper[4813]: I1007 20:35:08.857379 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/must-gather-g9n48" event={"ID":"4b27f1b7-5f74-4e67-8a17-5ba84c954120","Type":"ContainerStarted","Data":"20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9"}
Oct 07 20:35:08 crc kubenswrapper[4813]: I1007 20:35:08.880501 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jdm8p/must-gather-g9n48" podStartSLOduration=2.880473771 podStartE2EDuration="2.880473771s" podCreationTimestamp="2025-10-07 20:35:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 20:35:08.877177477 +0000 UTC m=+4634.955433108" watchObservedRunningTime="2025-10-07 20:35:08.880473771 +0000 UTC m=+4634.958729412"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.363740 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-q9kbw"]
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.365289 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-q9kbw"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.368465 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jdm8p"/"default-dockercfg-k9x4g"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.490446 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-host\") pod \"crc-debug-q9kbw\" (UID: \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\") " pod="openshift-must-gather-jdm8p/crc-debug-q9kbw"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.490551 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nnbb\" (UniqueName: \"kubernetes.io/projected/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-kube-api-access-7nnbb\") pod \"crc-debug-q9kbw\" (UID: \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\") " pod="openshift-must-gather-jdm8p/crc-debug-q9kbw"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.592165 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-host\") pod \"crc-debug-q9kbw\" (UID: \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\") " pod="openshift-must-gather-jdm8p/crc-debug-q9kbw"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.592235 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nnbb\" (UniqueName: \"kubernetes.io/projected/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-kube-api-access-7nnbb\") pod \"crc-debug-q9kbw\" (UID: \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\") " pod="openshift-must-gather-jdm8p/crc-debug-q9kbw"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.592307 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-host\") pod \"crc-debug-q9kbw\" (UID: \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\") " pod="openshift-must-gather-jdm8p/crc-debug-q9kbw"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.610088 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nnbb\" (UniqueName: \"kubernetes.io/projected/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-kube-api-access-7nnbb\") pod \"crc-debug-q9kbw\" (UID: \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\") " pod="openshift-must-gather-jdm8p/crc-debug-q9kbw"
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.681212 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-q9kbw"
Oct 07 20:35:11 crc kubenswrapper[4813]: W1007 20:35:11.706411 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84542aa1_68cd_4ec8_80c0_9deea1fb45ee.slice/crio-d57631ff97398a50104e4fec6bf332371ccbdd64b7d9c904e106b3c895fd70e3 WatchSource:0}: Error finding container d57631ff97398a50104e4fec6bf332371ccbdd64b7d9c904e106b3c895fd70e3: Status 404 returned error can't find the container with id d57631ff97398a50104e4fec6bf332371ccbdd64b7d9c904e106b3c895fd70e3
Oct 07 20:35:11 crc kubenswrapper[4813]: I1007 20:35:11.893532 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-q9kbw" event={"ID":"84542aa1-68cd-4ec8-80c0-9deea1fb45ee","Type":"ContainerStarted","Data":"d57631ff97398a50104e4fec6bf332371ccbdd64b7d9c904e106b3c895fd70e3"}
Oct 07 20:35:12 crc kubenswrapper[4813]: I1007 20:35:12.904591 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-q9kbw" event={"ID":"84542aa1-68cd-4ec8-80c0-9deea1fb45ee","Type":"ContainerStarted","Data":"26e489c6191edc565985f538da9d781f32c6f53075084141f79b0769ea65053e"}
Oct 07 20:35:12 crc kubenswrapper[4813]: I1007 20:35:12.922030 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jdm8p/crc-debug-q9kbw" podStartSLOduration=1.922008504 podStartE2EDuration="1.922008504s" podCreationTimestamp="2025-10-07 20:35:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 20:35:12.917770732 +0000 UTC m=+4638.996026353" watchObservedRunningTime="2025-10-07 20:35:12.922008504 +0000 UTC m=+4639.000264125"
Oct 07 20:36:11 crc kubenswrapper[4813]: I1007 20:36:11.468514 4813 scope.go:117] "RemoveContainer" containerID="0a18fce0a30f3832a8d10bc5972afefc12257913e1422818d6a18703d944ea84"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.390039 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m5pgt"]
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.395221 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.415048 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5pgt"]
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.538759 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-utilities\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.539998 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vpzs\" (UniqueName: \"kubernetes.io/projected/6c98257c-0861-40e1-bbd9-9d0a80f1846c-kube-api-access-4vpzs\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.540249 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-catalog-content\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.642845 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-catalog-content\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.643239 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-utilities\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.643415 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vpzs\" (UniqueName: \"kubernetes.io/projected/6c98257c-0861-40e1-bbd9-9d0a80f1846c-kube-api-access-4vpzs\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.643751 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-catalog-content\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.643819 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-utilities\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.668094 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vpzs\" (UniqueName: \"kubernetes.io/projected/6c98257c-0861-40e1-bbd9-9d0a80f1846c-kube-api-access-4vpzs\") pod \"redhat-marketplace-m5pgt\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") " pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:28 crc kubenswrapper[4813]: I1007 20:36:28.740350 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:29 crc kubenswrapper[4813]: I1007 20:36:29.413448 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5pgt"]
Oct 07 20:36:29 crc kubenswrapper[4813]: W1007 20:36:29.837521 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c98257c_0861_40e1_bbd9_9d0a80f1846c.slice/crio-6566b0d5ae5d68fe276cc52ade45b1533fdc37691bdda1ea8f8680046c2f4f8f WatchSource:0}: Error finding container 6566b0d5ae5d68fe276cc52ade45b1533fdc37691bdda1ea8f8680046c2f4f8f: Status 404 returned error can't find the container with id 6566b0d5ae5d68fe276cc52ade45b1533fdc37691bdda1ea8f8680046c2f4f8f
Oct 07 20:36:30 crc kubenswrapper[4813]: I1007 20:36:30.620293 4813 generic.go:334] "Generic (PLEG): container finished" podID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerID="ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f" exitCode=0
Oct 07 20:36:30 crc kubenswrapper[4813]: I1007 20:36:30.621690 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5pgt" event={"ID":"6c98257c-0861-40e1-bbd9-9d0a80f1846c","Type":"ContainerDied","Data":"ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f"}
Oct 07 20:36:30 crc kubenswrapper[4813]: I1007 20:36:30.621739 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5pgt" event={"ID":"6c98257c-0861-40e1-bbd9-9d0a80f1846c","Type":"ContainerStarted","Data":"6566b0d5ae5d68fe276cc52ade45b1533fdc37691bdda1ea8f8680046c2f4f8f"}
Oct 07 20:36:32 crc kubenswrapper[4813]: I1007 20:36:32.639952 4813 generic.go:334] "Generic (PLEG): container finished" podID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerID="13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d" exitCode=0
Oct 07 20:36:32 crc kubenswrapper[4813]: I1007 20:36:32.640010 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5pgt" event={"ID":"6c98257c-0861-40e1-bbd9-9d0a80f1846c","Type":"ContainerDied","Data":"13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d"}
Oct 07 20:36:33 crc kubenswrapper[4813]: I1007 20:36:33.653825 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5pgt" event={"ID":"6c98257c-0861-40e1-bbd9-9d0a80f1846c","Type":"ContainerStarted","Data":"80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c"}
Oct 07 20:36:33 crc kubenswrapper[4813]: I1007 20:36:33.683131 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m5pgt" podStartSLOduration=3.282832574 podStartE2EDuration="5.683116092s" podCreationTimestamp="2025-10-07 20:36:28 +0000 UTC" firstStartedPulling="2025-10-07 20:36:30.624637545 +0000 UTC m=+4716.702893156" lastFinishedPulling="2025-10-07 20:36:33.024921033 +0000 UTC m=+4719.103176674" observedRunningTime="2025-10-07 20:36:33.677186491 +0000 UTC m=+4719.755442092" watchObservedRunningTime="2025-10-07 20:36:33.683116092 +0000 UTC m=+4719.761371703"
Oct 07 20:36:37 crc kubenswrapper[4813]: I1007 20:36:37.058855 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f688869c6-w96p7_f241042f-7389-4b62-b934-ac5ac321fcbc/barbican-api/0.log"
Oct 07 20:36:37 crc kubenswrapper[4813]: I1007 20:36:37.348031 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f688869c6-w96p7_f241042f-7389-4b62-b934-ac5ac321fcbc/barbican-api-log/0.log"
Oct 07 20:36:37 crc kubenswrapper[4813]: I1007 20:36:37.658676 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d696dd678-l487w_584974f4-f44d-4f67-b675-9b0fb29be7f3/barbican-keystone-listener/0.log"
Oct 07 20:36:37 crc kubenswrapper[4813]: I1007 20:36:37.765903 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d696dd678-l487w_584974f4-f44d-4f67-b675-9b0fb29be7f3/barbican-keystone-listener-log/0.log"
Oct 07 20:36:37 crc kubenswrapper[4813]: I1007 20:36:37.946932 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-664466bb6c-ldqlb_dc06daa5-4a82-4b6c-bc77-2d40de999f15/barbican-worker/0.log"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.043081 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-664466bb6c-ldqlb_dc06daa5-4a82-4b6c-bc77-2d40de999f15/barbican-worker-log/0.log"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.216433 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-mbs92_f58a4cdc-b5b0-421f-bd28-6c46f3d99af3/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.462651 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_e4331b72-d366-4e3d-972d-419bacf0d2f2/ceilometer-central-agent/0.log"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.473013 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_e4331b72-d366-4e3d-972d-419bacf0d2f2/ceilometer-notification-agent/0.log"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.528378 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_e4331b72-d366-4e3d-972d-419bacf0d2f2/proxy-httpd/0.log"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.706190 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_e4331b72-d366-4e3d-972d-419bacf0d2f2/sg-core/0.log"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.741240 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.741289 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.795583 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.841264 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ec6f0c69-4799-4be4-b465-19ff21b1f35a/cinder-api/0.log"
Oct 07 20:36:38 crc kubenswrapper[4813]: I1007 20:36:38.980506 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ec6f0c69-4799-4be4-b465-19ff21b1f35a/cinder-api-log/0.log"
Oct 07 20:36:39 crc kubenswrapper[4813]: I1007 20:36:39.681708 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bad7f43d-8146-46b1-a2d4-9c4a23cd4377/cinder-scheduler/0.log"
Oct 07 20:36:39 crc kubenswrapper[4813]: I1007 20:36:39.714801 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bad7f43d-8146-46b1-a2d4-9c4a23cd4377/probe/0.log"
Oct 07 20:36:39 crc kubenswrapper[4813]: I1007 20:36:39.750640 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:39 crc kubenswrapper[4813]: I1007 20:36:39.795804 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5pgt"]
Oct 07 20:36:39 crc kubenswrapper[4813]: I1007 20:36:39.998360 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-6whsv_a6b4cff6-9f92-484a-a556-d7b95dcf455f/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:40 crc kubenswrapper[4813]: I1007 20:36:40.110075 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-6bcwn_5eda0149-d966-4253-9bb0-0bddbaaa29f1/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:40 crc kubenswrapper[4813]: I1007 20:36:40.329661 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-z6hvh_b27c1155-4bc4-4d5d-b782-418c675819d6/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:40 crc kubenswrapper[4813]: I1007 20:36:40.511382 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bc556cf6f-lzpcx_985bc25a-aeea-4538-bbfe-e2461641e594/init/0.log"
Oct 07 20:36:40 crc kubenswrapper[4813]: I1007 20:36:40.635994 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bc556cf6f-lzpcx_985bc25a-aeea-4538-bbfe-e2461641e594/init/0.log"
Oct 07 20:36:40 crc kubenswrapper[4813]: I1007 20:36:40.812798 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6bc556cf6f-lzpcx_985bc25a-aeea-4538-bbfe-e2461641e594/dnsmasq-dns/0.log"
Oct 07 20:36:41 crc kubenswrapper[4813]: I1007 20:36:41.298877 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tvqxn_4713cec2-7e5d-4d1b-8436-1cd44794b936/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:41 crc kubenswrapper[4813]: I1007 20:36:41.595971 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e7153372-cc47-4ff1-8481-b04a58c5c587/glance-httpd/0.log"
Oct 07 20:36:41 crc kubenswrapper[4813]: I1007 20:36:41.709873 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m5pgt" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerName="registry-server" containerID="cri-o://80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c" gracePeriod=2
Oct 07 20:36:41 crc kubenswrapper[4813]: I1007 20:36:41.962289 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e7153372-cc47-4ff1-8481-b04a58c5c587/glance-log/0.log"
Oct 07 20:36:41 crc kubenswrapper[4813]: I1007 20:36:41.982227 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_84cd7775-d255-44d6-a361-0fd247bb406d/glance-log/0.log"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.072626 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_84cd7775-d255-44d6-a361-0fd247bb406d/glance-httpd/0.log"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.206793 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.261308 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-687ddb5b-lwwn2_a0b0d403-9a0c-407b-a3d4-a0db3e612092/horizon/0.log"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.312958 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-utilities\") pod \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") "
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.313039 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vpzs\" (UniqueName: \"kubernetes.io/projected/6c98257c-0861-40e1-bbd9-9d0a80f1846c-kube-api-access-4vpzs\") pod \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") "
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.313069 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-catalog-content\") pod \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\" (UID: \"6c98257c-0861-40e1-bbd9-9d0a80f1846c\") "
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.313802 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-utilities" (OuterVolumeSpecName: "utilities") pod "6c98257c-0861-40e1-bbd9-9d0a80f1846c" (UID: "6c98257c-0861-40e1-bbd9-9d0a80f1846c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.331885 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c98257c-0861-40e1-bbd9-9d0a80f1846c-kube-api-access-4vpzs" (OuterVolumeSpecName: "kube-api-access-4vpzs") pod "6c98257c-0861-40e1-bbd9-9d0a80f1846c" (UID: "6c98257c-0861-40e1-bbd9-9d0a80f1846c"). InnerVolumeSpecName "kube-api-access-4vpzs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.339006 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c98257c-0861-40e1-bbd9-9d0a80f1846c" (UID: "6c98257c-0861-40e1-bbd9-9d0a80f1846c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.403097 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-687ddb5b-lwwn2_a0b0d403-9a0c-407b-a3d4-a0db3e612092/horizon/1.log"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.416429 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.416463 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vpzs\" (UniqueName: \"kubernetes.io/projected/6c98257c-0861-40e1-bbd9-9d0a80f1846c-kube-api-access-4vpzs\") on node \"crc\" DevicePath \"\""
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.416472 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c98257c-0861-40e1-bbd9-9d0a80f1846c-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.717956 4813 generic.go:334] "Generic (PLEG): container finished" podID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerID="80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c" exitCode=0
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.718176 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5pgt" event={"ID":"6c98257c-0861-40e1-bbd9-9d0a80f1846c","Type":"ContainerDied","Data":"80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c"}
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.718308 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5pgt" event={"ID":"6c98257c-0861-40e1-bbd9-9d0a80f1846c","Type":"ContainerDied","Data":"6566b0d5ae5d68fe276cc52ade45b1533fdc37691bdda1ea8f8680046c2f4f8f"}
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.718397 4813 scope.go:117] "RemoveContainer" containerID="80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.718469 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m5pgt"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.746469 4813 scope.go:117] "RemoveContainer" containerID="13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.751295 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5pgt"]
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.758442 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5pgt"]
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.777893 4813 scope.go:117] "RemoveContainer" containerID="ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.815364 4813 scope.go:117] "RemoveContainer" containerID="80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c"
Oct 07 20:36:42 crc kubenswrapper[4813]: E1007 20:36:42.815793 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c\": container with ID starting with 80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c not found: ID does not exist" containerID="80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.815821 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c"} err="failed to get container status \"80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c\": rpc error: code = NotFound desc = could not find container \"80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c\": container with ID starting with 80fce5274808a6fcdc5ec24e470600cc79f2aaa6e41e16f0b95d6921c43e408c not found: ID does not exist"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.815853 4813 scope.go:117] "RemoveContainer" containerID="13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d"
Oct 07 20:36:42 crc kubenswrapper[4813]: E1007 20:36:42.822446 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d\": container with ID starting with 13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d not found: ID does not exist" containerID="13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.822478 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d"} err="failed to get container status \"13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d\": rpc error: code = NotFound desc = could not find container \"13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d\": container with ID starting with 13fd58f47e857fc5a9322a3396708edb88fbdfdc50a22b1394713ebb6f2e331d not found: ID does not exist"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.822496 4813 scope.go:117] "RemoveContainer" containerID="ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f"
Oct 07 20:36:42 crc kubenswrapper[4813]: E1007 20:36:42.823748 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f\": container with ID starting with ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f not found: ID does not exist" containerID="ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.823799 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f"} err="failed to get container status \"ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f\": rpc error: code = NotFound desc = could not find container \"ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f\": container with ID starting with ea9b1194ffd65a3518c42a8fb71c6375dafffa2eab2cf122adafc7b1643da11f not found: ID does not exist"
Oct 07 20:36:42 crc kubenswrapper[4813]: I1007 20:36:42.958055 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-m5pf5_4edb32ce-3490-4665-8fde-69010044b237/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:43 crc kubenswrapper[4813]: I1007 20:36:43.064893 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-687ddb5b-lwwn2_a0b0d403-9a0c-407b-a3d4-a0db3e612092/horizon-log/0.log"
Oct 07 20:36:43 crc kubenswrapper[4813]: I1007 20:36:43.357634 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-25k9r_b7876782-6cc3-47e2-ab62-b9082196a5c8/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:43 crc kubenswrapper[4813]: I1007 20:36:43.654680 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29331121-7mg5t_f060a35b-a8f6-4392-82bf-9e557928512c/keystone-cron/0.log"
Oct 07 20:36:43 crc kubenswrapper[4813]: I1007 20:36:43.695416 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-68ff4bb5b-nhpkd_dbaf5ea4-f023-4620-a2cb-45bd1b8c61a3/keystone-api/0.log"
Oct 07 20:36:43 crc kubenswrapper[4813]: I1007 20:36:43.712213 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_ba18b055-6c70-4c3c-b464-8138c86bc3ea/kube-state-metrics/0.log"
Oct 07 20:36:44 crc kubenswrapper[4813]: I1007 20:36:44.087676 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-4dn96_a4b24290-359e-4973-bf65-53ca4889870d/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:44 crc kubenswrapper[4813]: I1007 20:36:44.619034 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" path="/var/lib/kubelet/pods/6c98257c-0861-40e1-bbd9-9d0a80f1846c/volumes"
Oct 07 20:36:44 crc kubenswrapper[4813]: I1007 20:36:44.792539 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-f9bbl_d4674843-15aa-4490-a878-bc2853b4457b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:44 crc kubenswrapper[4813]: I1007 20:36:44.866735 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5496dd8845-nwmf5_013c7ce7-ad1b-4f61-920b-f5c5f685dcd7/neutron-httpd/0.log"
Oct 07 20:36:45 crc kubenswrapper[4813]: I1007 20:36:45.077300 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5496dd8845-nwmf5_013c7ce7-ad1b-4f61-920b-f5c5f685dcd7/neutron-api/0.log"
Oct 07 20:36:46 crc kubenswrapper[4813]: I1007 20:36:46.332764 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c7632bfd-361b-4c06-a1cf-2ec99cd2c2a1/nova-cell0-conductor-conductor/0.log"
Oct 07 20:36:46 crc kubenswrapper[4813]: I1007 20:36:46.541746 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_d330e133-a612-477b-afbd-2af06b9e084d/memcached/0.log"
Oct 07 20:36:46 crc kubenswrapper[4813]: I1007 20:36:46.552973 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_efa771aa-3427-4b7e-b8a8-775222785447/nova-api-log/0.log"
Oct 07 20:36:46 crc kubenswrapper[4813]: I1007 20:36:46.680310 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_efa771aa-3427-4b7e-b8a8-775222785447/nova-api-api/0.log"
Oct 07 20:36:46 crc kubenswrapper[4813]: I1007 20:36:46.930495 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_bed232f8-c7a0-446c-8667-0fb3afda3343/nova-cell1-novncproxy-novncproxy/0.log"
Oct 07 20:36:46 crc kubenswrapper[4813]: I1007 20:36:46.957670 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_2536ca60-1d39-40b9-a15b-708804ec9fa5/nova-cell1-conductor-conductor/0.log"
Oct 07 20:36:47 crc kubenswrapper[4813]: I1007 20:36:47.177228 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-ltrnt_658e18b5-93de-4f7b-962b-fcc403470a2c/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 07 20:36:47 crc kubenswrapper[4813]: I1007 20:36:47.290435 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_ee05e116-f577-4638-8c15-6fb6ff348eaf/nova-metadata-log/0.log"
Oct 07 20:36:47 crc kubenswrapper[4813]: I1007 20:36:47.842074 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1a2d18a4-7c93-4743-8f3d-3367a4dd937a/mysql-bootstrap/0.log"
Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.067864 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e032c1c7-6f6c-4265-9320-0500b815ec64/nova-scheduler-scheduler/0.log"
Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.068624 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1a2d18a4-7c93-4743-8f3d-3367a4dd937a/galera/0.log"
Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.150386 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1a2d18a4-7c93-4743-8f3d-3367a4dd937a/mysql-bootstrap/0.log"
Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.371769 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b/mysql-bootstrap/0.log"
Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.587259 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_ee05e116-f577-4638-8c15-6fb6ff348eaf/nova-metadata-metadata/0.log"
Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.600193 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b/mysql-bootstrap/0.log"
Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.698058 4813 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack_openstack-galera-0_93ab11a3-9ed5-4cda-85cc-eea1e91f2d3b/galera/0.log" Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.809082 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_00a18181-39b8-42bc-8cc9-4518c7a16137/openstackclient/0.log" Oct 07 20:36:48 crc kubenswrapper[4813]: I1007 20:36:48.961393 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jd55f_a162a130-6094-42c0-a3d1-489de4a7fac4/ovn-controller/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.005137 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ww8fx_1276e9fd-662d-41f1-8c9d-05abbbfbf0a2/openstack-network-exporter/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.215517 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dmq4j_47f8d464-3eaa-4ee5-ae74-c6339710ade0/ovsdb-server-init/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.371409 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dmq4j_47f8d464-3eaa-4ee5-ae74-c6339710ade0/ovsdb-server-init/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.375976 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dmq4j_47f8d464-3eaa-4ee5-ae74-c6339710ade0/ovsdb-server/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.381565 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-dmq4j_47f8d464-3eaa-4ee5-ae74-c6339710ade0/ovs-vswitchd/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.562457 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d09b3567-cc2a-48cc-b1ea-b0c65fee032d/openstack-network-exporter/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.613878 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-96t9v_adaa6c4f-3899-4644-acb5-81f67417971e/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.692577 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_d09b3567-cc2a-48cc-b1ea-b0c65fee032d/ovn-northd/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.869906 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b6f3d5f7-8af0-4f42-ae53-bc7473860346/openstack-network-exporter/0.log" Oct 07 20:36:49 crc kubenswrapper[4813]: I1007 20:36:49.995661 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b6f3d5f7-8af0-4f42-ae53-bc7473860346/ovsdbserver-nb/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.123993 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_55f9cac2-ed84-40f8-8bca-f10c774814f7/openstack-network-exporter/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.129774 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_55f9cac2-ed84-40f8-8bca-f10c774814f7/ovsdbserver-sb/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.429357 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-bd4864b74-5mp8m_83739b1f-81fa-4e83-baea-f75bae3f1ea5/placement-api/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.526556 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_placement-bd4864b74-5mp8m_83739b1f-81fa-4e83-baea-f75bae3f1ea5/placement-log/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.574057 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2412d699-edb6-474b-95da-eb29d703dfd4/setup-container/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.730036 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2412d699-edb6-474b-95da-eb29d703dfd4/rabbitmq/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.779202 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d486108c-7921-4770-81bf-b309787cbf5a/setup-container/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.827493 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2412d699-edb6-474b-95da-eb29d703dfd4/setup-container/0.log" Oct 07 20:36:50 crc kubenswrapper[4813]: I1007 20:36:50.982392 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d486108c-7921-4770-81bf-b309787cbf5a/setup-container/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.031895 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-v7fxf_a9b763f6-c95e-4650-8aa4-3f99675f3e48/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.096157 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d486108c-7921-4770-81bf-b309787cbf5a/rabbitmq/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.247141 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-tdk5g_947e90ca-70e6-4956-a58b-06c3faf10445/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.305630 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-5v5fz_f558bb4b-742a-4c7d-bad0-ce2356b9765c/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.521550 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-4ntxh_ab40c88e-7fbf-44d6-83a6-0bb6be959120/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.580983 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-z5kbp_4a506120-df34-41d9-b92a-9e8944c15dcf/ssh-known-hosts-edpm-deployment/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.855285 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65d65664c-r46qm_d3c7d72e-ba30-402f-99f1-aff8e4c688ee/proxy-server/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.884415 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-65d65664c-r46qm_d3c7d72e-ba30-402f-99f1-aff8e4c688ee/proxy-httpd/0.log" Oct 07 20:36:51 crc kubenswrapper[4813]: I1007 20:36:51.998953 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-bj76f_8cee7433-9535-4db0-aa37-e8fc28bdbf94/swift-ring-rebalance/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.078554 4813 patch_prober.go:28] interesting 
pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.078613 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.080548 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/account-auditor/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.127011 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/account-reaper/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.241027 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/account-replicator/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.338070 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/container-auditor/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.360066 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/container-replicator/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.377794 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/account-server/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.444846 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/container-server/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.553940 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/container-updater/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.569288 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-auditor/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.602661 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-expirer/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.656889 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-replicator/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.761425 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-server/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.785413 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/rsync/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.812253 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/object-updater/0.log" Oct 07 20:36:52 crc kubenswrapper[4813]: I1007 20:36:52.879642 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bd53a283-8633-435c-a910-ab9abccb5c0d/swift-recon-cron/0.log" Oct 07 20:36:53 crc kubenswrapper[4813]: I1007 20:36:53.026501 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-l6bh7_07dc752d-c126-4085-9367-ca8bcee2c1ec/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:36:53 crc kubenswrapper[4813]: I1007 20:36:53.166518 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_8a9b2bd1-18d3-4b04-bb13-a9e0ecd0c136/tempest-tests-tempest-tests-runner/0.log" Oct 07 20:36:53 crc kubenswrapper[4813]: I1007 20:36:53.320238 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_938f4244-0f47-43a8-af88-c2a117af6d37/test-operator-logs-container/0.log" Oct 07 20:36:53 crc kubenswrapper[4813]: I1007 20:36:53.446928 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-fz2km_6db1d1eb-2150-4a66-bdae-015b651da395/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 07 20:37:13 crc kubenswrapper[4813]: I1007 20:37:13.978577 4813 generic.go:334] "Generic (PLEG): container finished" podID="84542aa1-68cd-4ec8-80c0-9deea1fb45ee" containerID="26e489c6191edc565985f538da9d781f32c6f53075084141f79b0769ea65053e" exitCode=0 Oct 07 20:37:13 crc kubenswrapper[4813]: I1007 20:37:13.979031 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-q9kbw" event={"ID":"84542aa1-68cd-4ec8-80c0-9deea1fb45ee","Type":"ContainerDied","Data":"26e489c6191edc565985f538da9d781f32c6f53075084141f79b0769ea65053e"} Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.637908 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-q9kbw" Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.670279 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-q9kbw"] Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.677641 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-q9kbw"] Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.728796 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-host\") pod \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\" (UID: \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\") " Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.728890 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-host" (OuterVolumeSpecName: "host") pod "84542aa1-68cd-4ec8-80c0-9deea1fb45ee" (UID: "84542aa1-68cd-4ec8-80c0-9deea1fb45ee"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.729360 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nnbb\" (UniqueName: \"kubernetes.io/projected/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-kube-api-access-7nnbb\") pod \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\" (UID: \"84542aa1-68cd-4ec8-80c0-9deea1fb45ee\") " Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.730564 4813 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-host\") on node \"crc\" DevicePath \"\"" Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.734973 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-kube-api-access-7nnbb" (OuterVolumeSpecName: "kube-api-access-7nnbb") pod "84542aa1-68cd-4ec8-80c0-9deea1fb45ee" (UID: "84542aa1-68cd-4ec8-80c0-9deea1fb45ee"). InnerVolumeSpecName "kube-api-access-7nnbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.832644 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nnbb\" (UniqueName: \"kubernetes.io/projected/84542aa1-68cd-4ec8-80c0-9deea1fb45ee-kube-api-access-7nnbb\") on node \"crc\" DevicePath \"\"" Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.999451 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d57631ff97398a50104e4fec6bf332371ccbdd64b7d9c904e106b3c895fd70e3" Oct 07 20:37:15 crc kubenswrapper[4813]: I1007 20:37:15.999526 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-q9kbw" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.611813 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84542aa1-68cd-4ec8-80c0-9deea1fb45ee" path="/var/lib/kubelet/pods/84542aa1-68cd-4ec8-80c0-9deea1fb45ee/volumes" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.932283 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-97bjx"] Oct 07 20:37:16 crc kubenswrapper[4813]: E1007 20:37:16.932653 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerName="extract-content" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.932665 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerName="extract-content" Oct 07 20:37:16 crc kubenswrapper[4813]: E1007 20:37:16.932679 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerName="registry-server" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.932685 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerName="registry-server" Oct 07 20:37:16 crc kubenswrapper[4813]: E1007 20:37:16.932716 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84542aa1-68cd-4ec8-80c0-9deea1fb45ee" containerName="container-00" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.932722 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="84542aa1-68cd-4ec8-80c0-9deea1fb45ee" containerName="container-00" Oct 07 20:37:16 crc kubenswrapper[4813]: E1007 20:37:16.932743 4813 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerName="extract-utilities" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.932749 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerName="extract-utilities" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.932921 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c98257c-0861-40e1-bbd9-9d0a80f1846c" containerName="registry-server" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.932944 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="84542aa1-68cd-4ec8-80c0-9deea1fb45ee" containerName="container-00" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.933532 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:16 crc kubenswrapper[4813]: I1007 20:37:16.936793 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jdm8p"/"default-dockercfg-k9x4g" Oct 07 20:37:17 crc kubenswrapper[4813]: I1007 20:37:17.054152 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2s5x\" (UniqueName: \"kubernetes.io/projected/3fe34745-890b-4ae9-a0fe-31fef499dd1f-kube-api-access-v2s5x\") pod \"crc-debug-97bjx\" (UID: \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\") " pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:17 crc kubenswrapper[4813]: I1007 20:37:17.054217 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fe34745-890b-4ae9-a0fe-31fef499dd1f-host\") pod \"crc-debug-97bjx\" (UID: \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\") " pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:17 crc kubenswrapper[4813]: I1007 20:37:17.156166 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2s5x\" (UniqueName: \"kubernetes.io/projected/3fe34745-890b-4ae9-a0fe-31fef499dd1f-kube-api-access-v2s5x\") pod \"crc-debug-97bjx\" (UID: \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\") " pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:17 crc kubenswrapper[4813]: I1007 20:37:17.156228 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fe34745-890b-4ae9-a0fe-31fef499dd1f-host\") pod \"crc-debug-97bjx\" (UID: \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\") " pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:17 crc kubenswrapper[4813]: I1007 20:37:17.156385 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fe34745-890b-4ae9-a0fe-31fef499dd1f-host\") pod \"crc-debug-97bjx\" (UID: \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\") " pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:17 crc kubenswrapper[4813]: I1007 20:37:17.190155 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2s5x\" (UniqueName: \"kubernetes.io/projected/3fe34745-890b-4ae9-a0fe-31fef499dd1f-kube-api-access-v2s5x\") pod \"crc-debug-97bjx\" (UID: \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\") " pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:17 crc kubenswrapper[4813]: I1007 20:37:17.248685 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:17 crc kubenswrapper[4813]: W1007 20:37:17.287661 4813 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3fe34745_890b_4ae9_a0fe_31fef499dd1f.slice/crio-cf7ba75ba64de8c4e7c6e823a826ebe438fc69d1cedb7682f48d9f84e81e8ebb WatchSource:0}: Error finding container cf7ba75ba64de8c4e7c6e823a826ebe438fc69d1cedb7682f48d9f84e81e8ebb: Status 404 returned error can't find the container with id cf7ba75ba64de8c4e7c6e823a826ebe438fc69d1cedb7682f48d9f84e81e8ebb Oct 07 20:37:18 crc kubenswrapper[4813]: I1007 20:37:18.016696 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" event={"ID":"3fe34745-890b-4ae9-a0fe-31fef499dd1f","Type":"ContainerStarted","Data":"3b7bac8910fc956cab9a413748b6f2dc89978b2ed4b6ac44c0ea8cf379deceac"} Oct 07 20:37:18 crc kubenswrapper[4813]: I1007 20:37:18.017270 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" event={"ID":"3fe34745-890b-4ae9-a0fe-31fef499dd1f","Type":"ContainerStarted","Data":"cf7ba75ba64de8c4e7c6e823a826ebe438fc69d1cedb7682f48d9f84e81e8ebb"} Oct 07 20:37:18 crc kubenswrapper[4813]: I1007 20:37:18.032930 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" podStartSLOduration=2.032908242 podStartE2EDuration="2.032908242s" podCreationTimestamp="2025-10-07 20:37:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-07 20:37:18.028694741 +0000 UTC m=+4764.106950352" watchObservedRunningTime="2025-10-07 20:37:18.032908242 +0000 UTC m=+4764.111163883" Oct 07 20:37:19 crc kubenswrapper[4813]: I1007 20:37:19.034304 4813 generic.go:334] "Generic (PLEG): container finished" podID="3fe34745-890b-4ae9-a0fe-31fef499dd1f" containerID="3b7bac8910fc956cab9a413748b6f2dc89978b2ed4b6ac44c0ea8cf379deceac" exitCode=0 Oct 07 20:37:19 crc kubenswrapper[4813]: I1007 20:37:19.034371 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" event={"ID":"3fe34745-890b-4ae9-a0fe-31fef499dd1f","Type":"ContainerDied","Data":"3b7bac8910fc956cab9a413748b6f2dc89978b2ed4b6ac44c0ea8cf379deceac"} Oct 07 20:37:20 crc kubenswrapper[4813]: I1007 20:37:20.154253 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:20 crc kubenswrapper[4813]: I1007 20:37:20.204143 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fe34745-890b-4ae9-a0fe-31fef499dd1f-host\") pod \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\" (UID: \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\") " Oct 07 20:37:20 crc kubenswrapper[4813]: I1007 20:37:20.204448 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2s5x\" (UniqueName: \"kubernetes.io/projected/3fe34745-890b-4ae9-a0fe-31fef499dd1f-kube-api-access-v2s5x\") pod \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\" (UID: \"3fe34745-890b-4ae9-a0fe-31fef499dd1f\") " Oct 07 20:37:20 crc kubenswrapper[4813]: I1007 20:37:20.204791 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fe34745-890b-4ae9-a0fe-31fef499dd1f-host" (OuterVolumeSpecName: "host") pod "3fe34745-890b-4ae9-a0fe-31fef499dd1f" (UID: "3fe34745-890b-4ae9-a0fe-31fef499dd1f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 20:37:20 crc kubenswrapper[4813]: I1007 20:37:20.205426 4813 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fe34745-890b-4ae9-a0fe-31fef499dd1f-host\") on node \"crc\" DevicePath \"\"" Oct 07 20:37:20 crc kubenswrapper[4813]: I1007 20:37:20.215418 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fe34745-890b-4ae9-a0fe-31fef499dd1f-kube-api-access-v2s5x" (OuterVolumeSpecName: "kube-api-access-v2s5x") pod "3fe34745-890b-4ae9-a0fe-31fef499dd1f" (UID: "3fe34745-890b-4ae9-a0fe-31fef499dd1f"). InnerVolumeSpecName "kube-api-access-v2s5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:37:20 crc kubenswrapper[4813]: I1007 20:37:20.306538 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2s5x\" (UniqueName: \"kubernetes.io/projected/3fe34745-890b-4ae9-a0fe-31fef499dd1f-kube-api-access-v2s5x\") on node \"crc\" DevicePath \"\"" Oct 07 20:37:21 crc kubenswrapper[4813]: I1007 20:37:21.055808 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" event={"ID":"3fe34745-890b-4ae9-a0fe-31fef499dd1f","Type":"ContainerDied","Data":"cf7ba75ba64de8c4e7c6e823a826ebe438fc69d1cedb7682f48d9f84e81e8ebb"} Oct 07 20:37:21 crc kubenswrapper[4813]: I1007 20:37:21.055864 4813 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf7ba75ba64de8c4e7c6e823a826ebe438fc69d1cedb7682f48d9f84e81e8ebb" Oct 07 20:37:21 crc kubenswrapper[4813]: I1007 20:37:21.055938 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-97bjx" Oct 07 20:37:22 crc kubenswrapper[4813]: I1007 20:37:22.078584 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 07 20:37:22 crc kubenswrapper[4813]: I1007 20:37:22.078924 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 07 20:37:25 crc kubenswrapper[4813]: I1007 20:37:25.630139 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-97bjx"] Oct 07 20:37:25 crc kubenswrapper[4813]: I1007 20:37:25.640848 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-97bjx"] Oct 07 20:37:26 crc kubenswrapper[4813]: I1007 20:37:26.617559 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fe34745-890b-4ae9-a0fe-31fef499dd1f" path="/var/lib/kubelet/pods/3fe34745-890b-4ae9-a0fe-31fef499dd1f/volumes" Oct 07 20:37:26 crc kubenswrapper[4813]: I1007 20:37:26.839022 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-dqwp8"] Oct 07 20:37:26 crc kubenswrapper[4813]: E1007 20:37:26.839458 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fe34745-890b-4ae9-a0fe-31fef499dd1f" containerName="container-00" Oct 07 20:37:26 crc kubenswrapper[4813]: I1007 20:37:26.839474 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fe34745-890b-4ae9-a0fe-31fef499dd1f" containerName="container-00" Oct 07 20:37:26 crc kubenswrapper[4813]: I1007 20:37:26.839683 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fe34745-890b-4ae9-a0fe-31fef499dd1f" containerName="container-00" Oct 07 20:37:26 crc kubenswrapper[4813]: I1007 20:37:26.840378 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:26 crc kubenswrapper[4813]: I1007 20:37:26.844081 4813 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jdm8p"/"default-dockercfg-k9x4g" Oct 07 20:37:26 crc kubenswrapper[4813]: I1007 20:37:26.913584 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d8f717a-1221-4eb2-8759-50f350eeb909-host\") pod \"crc-debug-dqwp8\" (UID: \"3d8f717a-1221-4eb2-8759-50f350eeb909\") " pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:26 crc kubenswrapper[4813]: I1007 20:37:26.913788 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rgrj\" (UniqueName: \"kubernetes.io/projected/3d8f717a-1221-4eb2-8759-50f350eeb909-kube-api-access-4rgrj\") pod \"crc-debug-dqwp8\" (UID: \"3d8f717a-1221-4eb2-8759-50f350eeb909\") " pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:27 crc kubenswrapper[4813]: I1007 20:37:27.015036 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d8f717a-1221-4eb2-8759-50f350eeb909-host\") pod \"crc-debug-dqwp8\" (UID: \"3d8f717a-1221-4eb2-8759-50f350eeb909\") " pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:27 crc kubenswrapper[4813]: I1007 20:37:27.015373 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rgrj\" (UniqueName: \"kubernetes.io/projected/3d8f717a-1221-4eb2-8759-50f350eeb909-kube-api-access-4rgrj\") pod \"crc-debug-dqwp8\" (UID: \"3d8f717a-1221-4eb2-8759-50f350eeb909\") " pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:27 crc kubenswrapper[4813]: I1007 20:37:27.015219 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d8f717a-1221-4eb2-8759-50f350eeb909-host\") pod \"crc-debug-dqwp8\" (UID: \"3d8f717a-1221-4eb2-8759-50f350eeb909\") " pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:27 crc kubenswrapper[4813]: I1007 20:37:27.042364 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rgrj\" (UniqueName: \"kubernetes.io/projected/3d8f717a-1221-4eb2-8759-50f350eeb909-kube-api-access-4rgrj\") pod \"crc-debug-dqwp8\" (UID: \"3d8f717a-1221-4eb2-8759-50f350eeb909\") " pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:27 crc kubenswrapper[4813]: I1007 20:37:27.180804 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.109920 4813 generic.go:334] "Generic (PLEG): container finished" podID="3d8f717a-1221-4eb2-8759-50f350eeb909" containerID="3e03ae268d041aaf99f32902d15d49a4f3266b8f64a6712aa469e7c2170b4e71" exitCode=0 Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.110245 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" event={"ID":"3d8f717a-1221-4eb2-8759-50f350eeb909","Type":"ContainerDied","Data":"3e03ae268d041aaf99f32902d15d49a4f3266b8f64a6712aa469e7c2170b4e71"} Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.110279 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" event={"ID":"3d8f717a-1221-4eb2-8759-50f350eeb909","Type":"ContainerStarted","Data":"71e89930ded6892a3f5da8529a4f27e5458fd939cf18243eeecc462fc4a8a223"} Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.158859 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-dqwp8"] Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.167745 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jdm8p/crc-debug-dqwp8"] Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.920214 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7fqvn"] Oct 07 20:37:28 crc kubenswrapper[4813]: E1007 20:37:28.920574 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d8f717a-1221-4eb2-8759-50f350eeb909" containerName="container-00" Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.920589 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d8f717a-1221-4eb2-8759-50f350eeb909" containerName="container-00" Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.920792 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d8f717a-1221-4eb2-8759-50f350eeb909" containerName="container-00" Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.921997 4813 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.949061 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7fqvn"] Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.951742 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-catalog-content\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.951935 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-utilities\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:28 crc kubenswrapper[4813]: I1007 20:37:28.951988 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-248x8\" (UniqueName: \"kubernetes.io/projected/3c031e0d-bebb-4af7-93db-f77c5567977c-kube-api-access-248x8\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.053680 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-utilities\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.053768 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-248x8\" (UniqueName: \"kubernetes.io/projected/3c031e0d-bebb-4af7-93db-f77c5567977c-kube-api-access-248x8\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.053823 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-catalog-content\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.054192 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-utilities\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.054257 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-catalog-content\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.073227 4813 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-248x8\" (UniqueName: \"kubernetes.io/projected/3c031e0d-bebb-4af7-93db-f77c5567977c-kube-api-access-248x8\") pod \"certified-operators-7fqvn\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") " pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.223897 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.245902 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7fqvn" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.258810 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d8f717a-1221-4eb2-8759-50f350eeb909-host\") pod \"3d8f717a-1221-4eb2-8759-50f350eeb909\" (UID: \"3d8f717a-1221-4eb2-8759-50f350eeb909\") " Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.259207 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rgrj\" (UniqueName: \"kubernetes.io/projected/3d8f717a-1221-4eb2-8759-50f350eeb909-kube-api-access-4rgrj\") pod \"3d8f717a-1221-4eb2-8759-50f350eeb909\" (UID: \"3d8f717a-1221-4eb2-8759-50f350eeb909\") " Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.258947 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d8f717a-1221-4eb2-8759-50f350eeb909-host" (OuterVolumeSpecName: "host") pod "3d8f717a-1221-4eb2-8759-50f350eeb909" (UID: "3d8f717a-1221-4eb2-8759-50f350eeb909"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.278573 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d8f717a-1221-4eb2-8759-50f350eeb909-kube-api-access-4rgrj" (OuterVolumeSpecName: "kube-api-access-4rgrj") pod "3d8f717a-1221-4eb2-8759-50f350eeb909" (UID: "3d8f717a-1221-4eb2-8759-50f350eeb909"). InnerVolumeSpecName "kube-api-access-4rgrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.361177 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rgrj\" (UniqueName: \"kubernetes.io/projected/3d8f717a-1221-4eb2-8759-50f350eeb909-kube-api-access-4rgrj\") on node \"crc\" DevicePath \"\"" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.361207 4813 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3d8f717a-1221-4eb2-8759-50f350eeb909-host\") on node \"crc\" DevicePath \"\"" Oct 07 20:37:29 crc kubenswrapper[4813]: I1007 20:37:29.895639 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7fqvn"] Oct 07 20:37:30 crc kubenswrapper[4813]: I1007 20:37:30.171187 4813 scope.go:117] "RemoveContainer" containerID="3e03ae268d041aaf99f32902d15d49a4f3266b8f64a6712aa469e7c2170b4e71" Oct 07 20:37:30 crc kubenswrapper[4813]: I1007 20:37:30.171293 4813 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jdm8p/crc-debug-dqwp8" Oct 07 20:37:30 crc kubenswrapper[4813]: I1007 20:37:30.218188 4813 generic.go:334] "Generic (PLEG): container finished" podID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerID="d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52" exitCode=0 Oct 07 20:37:30 crc kubenswrapper[4813]: I1007 20:37:30.218536 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7fqvn" event={"ID":"3c031e0d-bebb-4af7-93db-f77c5567977c","Type":"ContainerDied","Data":"d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52"} Oct 07 20:37:30 crc kubenswrapper[4813]: I1007 20:37:30.218568 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7fqvn" event={"ID":"3c031e0d-bebb-4af7-93db-f77c5567977c","Type":"ContainerStarted","Data":"1c6e58b7e3c30bba0306a5e01d4ad695cc1880b957e147017df2720aa79293c1"} Oct 07 20:37:30 crc kubenswrapper[4813]: I1007 20:37:30.611639 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d8f717a-1221-4eb2-8759-50f350eeb909" path="/var/lib/kubelet/pods/3d8f717a-1221-4eb2-8759-50f350eeb909/volumes" Oct 07 20:37:30 crc kubenswrapper[4813]: I1007 20:37:30.769712 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/util/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.022402 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/pull/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.038398 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/util/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.088912 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/pull/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.262064 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/extract/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.274399 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/pull/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.350989 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a8dccb7659b04b166960144014de0f047f3160ae84ef4ca9a0659290052cxpd_0413a8f5-bc87-4e1c-b38c-778c0fff449c/util/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.506315 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-fgdgs_e90691e1-eed5-4c60-af67-46cfca160910/kube-rbac-proxy/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.572060 4813 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-fgdgs_e90691e1-eed5-4c60-af67-46cfca160910/manager/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.675722 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-m49nk_8ff43feb-7984-4f63-b5b4-ab460e72ddc8/kube-rbac-proxy/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.809064 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-m49nk_8ff43feb-7984-4f63-b5b4-ab460e72ddc8/manager/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.862293 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-ndtgw_145ac332-1c3f-4aec-8438-0c3d36ca2c67/kube-rbac-proxy/0.log" Oct 07 20:37:31 crc kubenswrapper[4813]: I1007 20:37:31.908402 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-ndtgw_145ac332-1c3f-4aec-8438-0c3d36ca2c67/manager/0.log" Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.114983 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-lk6lc_66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2/kube-rbac-proxy/0.log" Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.117779 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-lk6lc_66a77b79-c1e6-4bb5-aa4f-3b6e97a536c2/manager/0.log" Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.239660 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7fqvn" event={"ID":"3c031e0d-bebb-4af7-93db-f77c5567977c","Type":"ContainerStarted","Data":"6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133"} Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.298485 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-mtpdz_ff4408c4-9269-43c0-8016-520816b8cd5d/kube-rbac-proxy/0.log" Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.318966 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-mtpdz_ff4408c4-9269-43c0-8016-520816b8cd5d/manager/0.log" Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.472423 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-zpbgs_0a182939-eba6-4da5-9e36-567b6a2a37c3/kube-rbac-proxy/0.log" Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.521899 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-zpbgs_0a182939-eba6-4da5-9e36-567b6a2a37c3/manager/0.log" Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.661758 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-gngt9_72b45aa2-2bd2-4339-8a89-5a2910798969/kube-rbac-proxy/0.log" Oct 07 20:37:32 crc kubenswrapper[4813]: I1007 20:37:32.823752 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-585fc5b659-gngt9_72b45aa2-2bd2-4339-8a89-5a2910798969/manager/0.log" Oct 07 20:37:33 crc 
kubenswrapper[4813]: I1007 20:37:33.249700 4813 generic.go:334] "Generic (PLEG): container finished" podID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerID="6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133" exitCode=0
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.249792 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7fqvn" event={"ID":"3c031e0d-bebb-4af7-93db-f77c5567977c","Type":"ContainerDied","Data":"6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133"}
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.257356 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-662qq_0bac4f10-1d47-40aa-b93e-9a0789801e9b/manager/0.log"
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.299721 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-662qq_0bac4f10-1d47-40aa-b93e-9a0789801e9b/kube-rbac-proxy/0.log"
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.376002 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-kjl4l_b487945e-823b-4d95-a1dc-6f7148aa053c/kube-rbac-proxy/0.log"
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.496022 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-kjl4l_b487945e-823b-4d95-a1dc-6f7148aa053c/manager/0.log"
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.546951 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-v6ggr_eb9b4085-2e2d-4955-bbd3-2c53bcada088/kube-rbac-proxy/0.log"
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.635939 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-v6ggr_eb9b4085-2e2d-4955-bbd3-2c53bcada088/manager/0.log"
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.787932 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-6xkm2_ff2bb528-f133-456a-9e91-5f4ef07a4f2f/kube-rbac-proxy/0.log"
Oct 07 20:37:33 crc kubenswrapper[4813]: I1007 20:37:33.896170 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-6xkm2_ff2bb528-f133-456a-9e91-5f4ef07a4f2f/manager/0.log"
Oct 07 20:37:34 crc kubenswrapper[4813]: I1007 20:37:34.069011 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-xzgn5_954d30ae-2fcd-4d29-8d44-a1cf40b56f27/kube-rbac-proxy/0.log"
Oct 07 20:37:34 crc kubenswrapper[4813]: I1007 20:37:34.103347 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-xzgn5_954d30ae-2fcd-4d29-8d44-a1cf40b56f27/manager/0.log"
Oct 07 20:37:34 crc kubenswrapper[4813]: I1007 20:37:34.183642 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-7m9t5_8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8/kube-rbac-proxy/0.log"
Oct 07 20:37:34 crc kubenswrapper[4813]: I1007 20:37:34.262676 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7fqvn" event={"ID":"3c031e0d-bebb-4af7-93db-f77c5567977c","Type":"ContainerStarted","Data":"3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99"}
Oct 07 20:37:34 crc kubenswrapper[4813]: I1007 20:37:34.281499 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7fqvn" podStartSLOduration=2.8167591400000003 podStartE2EDuration="6.281484228s" podCreationTimestamp="2025-10-07 20:37:28 +0000 UTC" firstStartedPulling="2025-10-07 20:37:30.298703357 +0000 UTC m=+4776.376958968" lastFinishedPulling="2025-10-07 20:37:33.763428445 +0000 UTC m=+4779.841684056" observedRunningTime="2025-10-07 20:37:34.279567723 +0000 UTC m=+4780.357823334" watchObservedRunningTime="2025-10-07 20:37:34.281484228 +0000 UTC m=+4780.359739839"
Oct 07 20:37:34 crc kubenswrapper[4813]: I1007 20:37:34.830812 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-7m9t5_8f3e8c30-6a9f-40a0-a95d-ad5e02b454b8/manager/0.log"
Oct 07 20:37:34 crc kubenswrapper[4813]: I1007 20:37:34.838671 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-x9rm6_6ae873de-e4da-48cc-9c55-143f61cdf190/kube-rbac-proxy/0.log"
Oct 07 20:37:34 crc kubenswrapper[4813]: I1007 20:37:34.899790 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-x9rm6_6ae873de-e4da-48cc-9c55-143f61cdf190/manager/0.log"
Oct 07 20:37:35 crc kubenswrapper[4813]: I1007 20:37:35.069623 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q_d39bfd53-3ae2-4fe1-a07e-9592be7062b6/kube-rbac-proxy/0.log"
Oct 07 20:37:35 crc kubenswrapper[4813]: I1007 20:37:35.120365 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dkjs6q_d39bfd53-3ae2-4fe1-a07e-9592be7062b6/manager/0.log"
Oct 07 20:37:35 crc kubenswrapper[4813]: I1007 20:37:35.141806 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6589b7f7cf-b6lcq_28f57161-1102-46a9-99a0-67fc1fc2ca33/kube-rbac-proxy/0.log"
Oct 07 20:37:35 crc kubenswrapper[4813]: I1007 20:37:35.454125 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-765cf949f-6sxnv_e0480957-44d9-4dcf-915f-ba4db55ad450/kube-rbac-proxy/0.log"
Oct 07 20:37:35 crc kubenswrapper[4813]: I1007 20:37:35.649966 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-r7css_ec9bfed9-1014-4ae2-ad89-b1815b613369/registry-server/0.log"
Oct 07 20:37:35 crc kubenswrapper[4813]: I1007 20:37:35.651913 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-765cf949f-6sxnv_e0480957-44d9-4dcf-915f-ba4db55ad450/operator/0.log"
Oct 07 20:37:35 crc kubenswrapper[4813]: I1007 20:37:35.814611 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f96f8c84-vv8xc_155009c1-92c2-493c-8969-12710fed4ec0/kube-rbac-proxy/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.012570 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f96f8c84-vv8xc_155009c1-92c2-493c-8969-12710fed4ec0/manager/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.106704 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-rhpjg_60dd68e0-dc15-4515-aab8-91f2cbd44487/manager/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.157686 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-rhpjg_60dd68e0-dc15-4515-aab8-91f2cbd44487/kube-rbac-proxy/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.264860 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-hp2q5_58a86259-bcad-428f-9d1d-5e8c059403a8/operator/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.336804 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6589b7f7cf-b6lcq_28f57161-1102-46a9-99a0-67fc1fc2ca33/manager/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.400556 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-j2xrx_7f9b49af-fca0-48b3-8291-db67e1597599/kube-rbac-proxy/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.486986 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-j2xrx_7f9b49af-fca0-48b3-8291-db67e1597599/manager/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.588389 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-775776c574-s879n_edbe78c4-559a-4296-a16d-37c92634c84f/manager/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.606006 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-775776c574-s879n_edbe78c4-559a-4296-a16d-37c92634c84f/kube-rbac-proxy/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.797401 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-74665f6cdc-5p9hd_ceb59888-cd38-4300-93ea-d8f00d0b3b6c/kube-rbac-proxy/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.862506 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5dd4499c96-nhkpb_4ced110c-65fb-4a77-aa0a-1a999a911ec1/kube-rbac-proxy/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.900410 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-74665f6cdc-5p9hd_ceb59888-cd38-4300-93ea-d8f00d0b3b6c/manager/0.log"
Oct 07 20:37:36 crc kubenswrapper[4813]: I1007 20:37:36.931854 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5dd4499c96-nhkpb_4ced110c-65fb-4a77-aa0a-1a999a911ec1/manager/0.log"
Oct 07 20:37:39 crc kubenswrapper[4813]: I1007 20:37:39.246413 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7fqvn"
Oct 07 20:37:39 crc kubenswrapper[4813]: I1007 20:37:39.246679 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7fqvn"
Oct 07 20:37:40 crc kubenswrapper[4813]: I1007 20:37:40.295257 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-7fqvn" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="registry-server" probeResult="failure" output=<
Oct 07 20:37:40 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s
Oct 07 20:37:40 crc kubenswrapper[4813]: >
Oct 07 20:37:49 crc kubenswrapper[4813]: I1007 20:37:49.294724 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7fqvn"
Oct 07 20:37:49 crc kubenswrapper[4813]: I1007 20:37:49.348185 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7fqvn"
Oct 07 20:37:49 crc kubenswrapper[4813]: I1007 20:37:49.528517 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7fqvn"]
Oct 07 20:37:50 crc kubenswrapper[4813]: I1007 20:37:50.409752 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7fqvn" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="registry-server" containerID="cri-o://3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99" gracePeriod=2
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.152768 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7fqvn"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.188405 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-248x8\" (UniqueName: \"kubernetes.io/projected/3c031e0d-bebb-4af7-93db-f77c5567977c-kube-api-access-248x8\") pod \"3c031e0d-bebb-4af7-93db-f77c5567977c\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") "
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.188621 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-catalog-content\") pod \"3c031e0d-bebb-4af7-93db-f77c5567977c\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") "
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.188679 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-utilities\") pod \"3c031e0d-bebb-4af7-93db-f77c5567977c\" (UID: \"3c031e0d-bebb-4af7-93db-f77c5567977c\") "
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.189510 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-utilities" (OuterVolumeSpecName: "utilities") pod "3c031e0d-bebb-4af7-93db-f77c5567977c" (UID: "3c031e0d-bebb-4af7-93db-f77c5567977c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.192513 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-utilities\") on node \"crc\" DevicePath \"\""
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.204560 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c031e0d-bebb-4af7-93db-f77c5567977c-kube-api-access-248x8" (OuterVolumeSpecName: "kube-api-access-248x8") pod "3c031e0d-bebb-4af7-93db-f77c5567977c" (UID: "3c031e0d-bebb-4af7-93db-f77c5567977c"). InnerVolumeSpecName "kube-api-access-248x8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.261692 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c031e0d-bebb-4af7-93db-f77c5567977c" (UID: "3c031e0d-bebb-4af7-93db-f77c5567977c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.294439 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-248x8\" (UniqueName: \"kubernetes.io/projected/3c031e0d-bebb-4af7-93db-f77c5567977c-kube-api-access-248x8\") on node \"crc\" DevicePath \"\""
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.294479 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c031e0d-bebb-4af7-93db-f77c5567977c-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.427768 4813 generic.go:334] "Generic (PLEG): container finished" podID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerID="3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99" exitCode=0
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.428329 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7fqvn" event={"ID":"3c031e0d-bebb-4af7-93db-f77c5567977c","Type":"ContainerDied","Data":"3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99"}
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.428459 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7fqvn"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.429348 4813 scope.go:117] "RemoveContainer" containerID="3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.429258 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7fqvn" event={"ID":"3c031e0d-bebb-4af7-93db-f77c5567977c","Type":"ContainerDied","Data":"1c6e58b7e3c30bba0306a5e01d4ad695cc1880b957e147017df2720aa79293c1"}
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.451319 4813 scope.go:117] "RemoveContainer" containerID="6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.469255 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7fqvn"]
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.483468 4813 scope.go:117] "RemoveContainer" containerID="d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.486724 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7fqvn"]
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.514076 4813 scope.go:117] "RemoveContainer" containerID="3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99"
Oct 07 20:37:51 crc kubenswrapper[4813]: E1007 20:37:51.514516 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99\": container with ID starting with 3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99 not found: ID does not exist" containerID="3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.514544 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99"} err="failed to get container status \"3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99\": rpc error: code = NotFound desc = could not find container \"3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99\": container with ID starting with 3969f612589f09c2270fb88b0936831ecb28761e224918742755aa6512c5cb99 not found: ID does not exist"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.514562 4813 scope.go:117] "RemoveContainer" containerID="6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133"
Oct 07 20:37:51 crc kubenswrapper[4813]: E1007 20:37:51.514884 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133\": container with ID starting with 6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133 not found: ID does not exist" containerID="6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.514905 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133"} err="failed to get container status \"6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133\": rpc error: code = NotFound desc = could not find container \"6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133\": container with ID starting with 6c0a23d879fe8a7152c0c2375f1069290298e3d24ba01bd38837b35325023133 not found: ID does not exist"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.514917 4813 scope.go:117] "RemoveContainer" containerID="d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52"
Oct 07 20:37:51 crc kubenswrapper[4813]: E1007 20:37:51.515228 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52\": container with ID starting with d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52 not found: ID does not exist" containerID="d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52"
Oct 07 20:37:51 crc kubenswrapper[4813]: I1007 20:37:51.515250 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52"} err="failed to get container status \"d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52\": rpc error: code = NotFound desc = could not find container \"d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52\": container with ID starting with d84fb9a7609cc6b63a543cf165c4d7047b7941900a29771d3fa920d491714d52 not found: ID does not exist"
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.078312 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.078393 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.078444 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf"
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.079229 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cc892fa0043484caa2f6d2787f88fed7a1e1635a66e9a8e0e51588d6df4363cf"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.079298 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://cc892fa0043484caa2f6d2787f88fed7a1e1635a66e9a8e0e51588d6df4363cf" gracePeriod=600
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.443982 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="cc892fa0043484caa2f6d2787f88fed7a1e1635a66e9a8e0e51588d6df4363cf" exitCode=0
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.444088 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"cc892fa0043484caa2f6d2787f88fed7a1e1635a66e9a8e0e51588d6df4363cf"}
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.444143 4813 scope.go:117] "RemoveContainer" containerID="d94793f199e1cb9af1567aebab012b7742e91365c7da9bcb50a4caa1bc9abee2"
Oct 07 20:37:52 crc kubenswrapper[4813]: I1007 20:37:52.613311 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" path="/var/lib/kubelet/pods/3c031e0d-bebb-4af7-93db-f77c5567977c/volumes"
Oct 07 20:37:53 crc kubenswrapper[4813]: I1007 20:37:53.455435 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerStarted","Data":"bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"}
Oct 07 20:37:53 crc kubenswrapper[4813]: I1007 20:37:53.964532 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-cpgrb_93cc15e9-3ae8-49f7-a7bf-8a3b0f453ec2/control-plane-machine-set-operator/0.log"
Oct 07 20:37:54 crc kubenswrapper[4813]: I1007 20:37:54.120801 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9crzl_ca47c43c-9e61-4697-b7f5-7cec65e2c992/kube-rbac-proxy/0.log"
Oct 07 20:37:54 crc kubenswrapper[4813]: I1007 20:37:54.125850 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9crzl_ca47c43c-9e61-4697-b7f5-7cec65e2c992/machine-api-operator/0.log"
Oct 07 20:38:07 crc kubenswrapper[4813]: I1007 20:38:07.297574 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-7lwr4_c1b0f2ad-748f-4212-809f-9e5d658608e5/cert-manager-controller/0.log"
Oct 07 20:38:07 crc kubenswrapper[4813]: I1007 20:38:07.365908 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-k7vkh_13884ec5-d712-4cd6-86d3-b1e6059b5fb7/cert-manager-cainjector/0.log"
Oct 07 20:38:07 crc kubenswrapper[4813]: I1007 20:38:07.490294 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-5mt2d_1c7224a1-2e4f-4cc4-a127-3791d5c68f6b/cert-manager-webhook/0.log"
Oct 07 20:38:20 crc kubenswrapper[4813]: I1007 20:38:20.940595 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-2fdzf_56036d68-a088-4f16-8fce-0c11b7c9c4e3/nmstate-console-plugin/0.log"
Oct 07 20:38:21 crc kubenswrapper[4813]: I1007 20:38:21.655817 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-bw2mb_4fe83b69-4076-411a-b34e-fd61c901eb03/nmstate-handler/0.log"
Oct 07 20:38:21 crc kubenswrapper[4813]: I1007 20:38:21.696973 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-7mqw5_3525607e-5512-4d19-a0ce-42df574e763a/kube-rbac-proxy/0.log"
Oct 07 20:38:21 crc kubenswrapper[4813]: I1007 20:38:21.718234 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-7mqw5_3525607e-5512-4d19-a0ce-42df574e763a/nmstate-metrics/0.log"
Oct 07 20:38:21 crc kubenswrapper[4813]: I1007 20:38:21.853913 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-vx5d8_8350f82b-1b55-4571-83a0-14a18f238c51/nmstate-operator/0.log"
Oct 07 20:38:21 crc kubenswrapper[4813]: I1007 20:38:21.922290 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-sr7d4_d66b41fa-f25e-4dd5-8f30-f496940d7d19/nmstate-webhook/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.155096 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-w6bx6_ece86cbe-2002-4e30-bedb-56f9631f5726/controller/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.202161 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-w6bx6_ece86cbe-2002-4e30-bedb-56f9631f5726/kube-rbac-proxy/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.309565 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-frr-files/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.443804 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-frr-files/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.455609 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-reloader/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.542605 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-reloader/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.542857 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-metrics/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.723442 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-frr-files/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.737837 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-metrics/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.765551 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-reloader/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.782621 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-metrics/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.918978 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-metrics/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.938259 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-reloader/0.log"
Oct 07 20:38:37 crc kubenswrapper[4813]: I1007 20:38:37.964759 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/cp-frr-files/0.log"
Oct 07 20:38:38 crc kubenswrapper[4813]: I1007 20:38:38.007924 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/controller/0.log"
Oct 07 20:38:38 crc kubenswrapper[4813]: I1007 20:38:38.152594 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/kube-rbac-proxy/0.log"
Oct 07 20:38:38 crc kubenswrapper[4813]: I1007 20:38:38.166573 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/frr-metrics/0.log"
Oct 07 20:38:38 crc kubenswrapper[4813]: I1007 20:38:38.208968 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/kube-rbac-proxy-frr/0.log"
Oct 07 20:38:38 crc kubenswrapper[4813]: I1007 20:38:38.493844 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/reloader/0.log"
Oct 07 20:38:38 crc kubenswrapper[4813]: I1007 20:38:38.546375 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-m79zg_d5c7c957-5714-4478-874f-1fe2cc7809af/frr-k8s-webhook-server/0.log"
Oct 07 20:38:38 crc kubenswrapper[4813]: I1007 20:38:38.701182 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-66cfc88647-54n75_602ff599-0e30-47a2-a316-75053689d031/manager/0.log"
Oct 07 20:38:38 crc kubenswrapper[4813]: I1007 20:38:38.897659 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-d65cbb559-4qksm_dc1f8862-6e00-4d65-a90f-f0db7d23cf42/webhook-server/0.log"
Oct 07 20:38:39 crc kubenswrapper[4813]: I1007 20:38:39.152022 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-5qpw7_184d7c42-4069-4dbe-a8e7-613da65cfb62/kube-rbac-proxy/0.log"
Oct 07 20:38:39 crc kubenswrapper[4813]: I1007 20:38:39.458991 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-64plq_969bd45f-c575-4c44-a7f8-b8fc0fd89a05/frr/0.log"
Oct 07 20:38:39 crc kubenswrapper[4813]: I1007 20:38:39.636668 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-5qpw7_184d7c42-4069-4dbe-a8e7-613da65cfb62/speaker/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.155514 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/util/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.380904 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/pull/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.397885 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/pull/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.405703 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/util/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.579356 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/util/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.614941 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/extract/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.641472 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2sjhk6_2d920201-8633-4cea-9d52-95f13d4e80ec/pull/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.763274 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-utilities/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.987370 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-content/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.990757 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-utilities/0.log"
Oct 07 20:38:53 crc kubenswrapper[4813]: I1007 20:38:53.994259 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-content/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.203734 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-utilities/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.221596 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/extract-content/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.421512 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-utilities/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.727851 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-content/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.749677 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-9pr84_be8b38bc-131f-4919-91a8-5c761a14a2b1/registry-server/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.759421 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-utilities/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.787958 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-content/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.892560 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-content/0.log"
Oct 07 20:38:54 crc kubenswrapper[4813]: I1007 20:38:54.930367 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/extract-utilities/0.log"
Oct 07 20:38:55 crc kubenswrapper[4813]: I1007 20:38:55.217640 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/util/0.log"
Oct 07 20:38:55 crc kubenswrapper[4813]: I1007 20:38:55.452262 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-wl725_6a5eb2de-dec2-49bf-a6f6-bd4fd672afdb/registry-server/0.log"
Oct 07 20:38:55 crc kubenswrapper[4813]: I1007 20:38:55.464424 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/pull/0.log"
Oct 07 20:38:55 crc kubenswrapper[4813]: I1007 20:38:55.487107 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/util/0.log"
Oct 07 20:38:55 crc kubenswrapper[4813]: I1007 20:38:55.546642 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/pull/0.log"
Oct 07 20:38:55 crc kubenswrapper[4813]: I1007 20:38:55.810589 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/pull/0.log"
Oct 07 20:38:55 crc kubenswrapper[4813]: I1007 20:38:55.812124 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/util/0.log"
Oct 07 20:38:55 crc kubenswrapper[4813]: I1007 20:38:55.882195 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835clmnb8_cfcbc16d-e9e2-4055-b621-f8d85a0c0ad2/extract/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.087488 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-trcm6_54507780-d039-4960-b75e-579f3b0aa7f5/marketplace-operator/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.132425 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-utilities/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.293306 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-utilities/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.348721 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-content/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.381355 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-content/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.525997 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-content/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.611718 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/extract-utilities/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.765222 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-utilities/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.770353 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-q5n28_d79cb01a-80ee-46db-93e3-c53740304297/registry-server/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.978575 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-utilities/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.993031 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-content/0.log"
Oct 07 20:38:56 crc kubenswrapper[4813]: I1007 20:38:56.996977 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-content/0.log"
Oct 07 20:38:57 crc kubenswrapper[4813]: I1007 20:38:57.155554 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-utilities/0.log"
Oct 07 20:38:57 crc kubenswrapper[4813]: I1007 20:38:57.180594 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/extract-content/0.log"
Oct 07 20:38:57 crc kubenswrapper[4813]: I1007 20:38:57.626667 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-w6v68_3588ed4b-20d8-4233-8542-27542f2bb5e4/registry-server/0.log"
Oct 07 20:40:22 crc kubenswrapper[4813]: I1007 20:40:22.079028 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 20:40:22 crc kubenswrapper[4813]: I1007 20:40:22.079723 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 20:40:52 crc kubenswrapper[4813]: I1007 20:40:52.078845 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 20:40:52 crc kubenswrapper[4813]: I1007 20:40:52.079409 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.079423 4813 patch_prober.go:28] interesting pod/machine-config-daemon-gcfdf container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.080034 4813 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.080103 4813 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf"
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.081147 4813 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"} pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.081245 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499" containerName="machine-config-daemon" containerID="cri-o://bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3" gracePeriod=600
Oct 07 20:41:22 crc kubenswrapper[4813]: E1007 20:41:22.216254 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.620545 4813 generic.go:334] "Generic (PLEG): container finished" podID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerID="20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9" exitCode=0
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.620626 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jdm8p/must-gather-g9n48" event={"ID":"4b27f1b7-5f74-4e67-8a17-5ba84c954120","Type":"ContainerDied","Data":"20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9"}
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.621253 4813 scope.go:117] "RemoveContainer" containerID="20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9"
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.623007 4813 generic.go:334] "Generic (PLEG): container finished" podID="537f8a53-dde4-4808-a822-9d8c922a8499" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3" exitCode=0
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.623038 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" event={"ID":"537f8a53-dde4-4808-a822-9d8c922a8499","Type":"ContainerDied","Data":"bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"}
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.623089 4813 scope.go:117] "RemoveContainer" containerID="cc892fa0043484caa2f6d2787f88fed7a1e1635a66e9a8e0e51588d6df4363cf"
Oct 07 20:41:22 crc kubenswrapper[4813]: I1007 20:41:22.624597 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:41:22 crc kubenswrapper[4813]: E1007 20:41:22.625032 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:41:23 crc kubenswrapper[4813]: I1007 20:41:23.533020 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jdm8p_must-gather-g9n48_4b27f1b7-5f74-4e67-8a17-5ba84c954120/gather/0.log"
Oct 07 20:41:33 crc kubenswrapper[4813]: I1007 20:41:33.602873 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:41:33 crc kubenswrapper[4813]: E1007 20:41:33.604259 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:41:38 crc kubenswrapper[4813]: I1007 20:41:38.813781 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jdm8p/must-gather-g9n48"]
Oct 07 20:41:38 crc kubenswrapper[4813]: I1007 20:41:38.814609 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-jdm8p/must-gather-g9n48" podUID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerName="copy" containerID="cri-o://33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0" gracePeriod=2
Oct 07 20:41:38 crc kubenswrapper[4813]: I1007 20:41:38.827254 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jdm8p/must-gather-g9n48"]
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.218251 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jdm8p_must-gather-g9n48_4b27f1b7-5f74-4e67-8a17-5ba84c954120/copy/0.log"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.219478 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.308862 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4b27f1b7-5f74-4e67-8a17-5ba84c954120-must-gather-output\") pod \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\" (UID: \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\") "
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.308964 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mp687\" (UniqueName: \"kubernetes.io/projected/4b27f1b7-5f74-4e67-8a17-5ba84c954120-kube-api-access-mp687\") pod \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\" (UID: \"4b27f1b7-5f74-4e67-8a17-5ba84c954120\") "
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.316334 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b27f1b7-5f74-4e67-8a17-5ba84c954120-kube-api-access-mp687" (OuterVolumeSpecName: "kube-api-access-mp687") pod "4b27f1b7-5f74-4e67-8a17-5ba84c954120" (UID: "4b27f1b7-5f74-4e67-8a17-5ba84c954120"). InnerVolumeSpecName "kube-api-access-mp687". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.410958 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mp687\" (UniqueName: \"kubernetes.io/projected/4b27f1b7-5f74-4e67-8a17-5ba84c954120-kube-api-access-mp687\") on node \"crc\" DevicePath \"\""
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.494586 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b27f1b7-5f74-4e67-8a17-5ba84c954120-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "4b27f1b7-5f74-4e67-8a17-5ba84c954120" (UID: "4b27f1b7-5f74-4e67-8a17-5ba84c954120"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.512570 4813 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4b27f1b7-5f74-4e67-8a17-5ba84c954120-must-gather-output\") on node \"crc\" DevicePath \"\""
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.811064 4813 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jdm8p_must-gather-g9n48_4b27f1b7-5f74-4e67-8a17-5ba84c954120/copy/0.log"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.811478 4813 generic.go:334] "Generic (PLEG): container finished" podID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerID="33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0" exitCode=143
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.811531 4813 scope.go:117] "RemoveContainer" containerID="33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.811554 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jdm8p/must-gather-g9n48"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.830491 4813 scope.go:117] "RemoveContainer" containerID="20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.882562 4813 scope.go:117] "RemoveContainer" containerID="33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0"
Oct 07 20:41:39 crc kubenswrapper[4813]: E1007 20:41:39.883313 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0\": container with ID starting with 33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0 not found: ID does not exist" containerID="33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.883458 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0"} err="failed to get container status \"33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0\": rpc error: code = NotFound desc = could not find container \"33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0\": container with ID starting with 33c16595eb86550f8aeaa52e6e0f8461e1c52ed87e9fed5bbfb7cc23d9e225a0 not found: ID does not exist"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.883539 4813 scope.go:117] "RemoveContainer" containerID="20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9"
Oct 07 20:41:39 crc kubenswrapper[4813]: E1007 20:41:39.884168 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9\": container with ID starting with 20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9 not found: ID does not exist" containerID="20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9"
Oct 07 20:41:39 crc kubenswrapper[4813]: I1007 20:41:39.884211 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9"} err="failed to get container status \"20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9\": rpc error: code = NotFound desc = could not find container \"20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9\": container with ID starting with 20e7197a017a14f7aaa5dc9fdbe40fb4cf904884c4a9e04b3624d207185ae5e9 not found: ID does not exist"
Oct 07 20:41:40 crc kubenswrapper[4813]: I1007 20:41:40.618048 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" path="/var/lib/kubelet/pods/4b27f1b7-5f74-4e67-8a17-5ba84c954120/volumes"
Oct 07 20:41:44 crc kubenswrapper[4813]: I1007 20:41:44.618970 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:41:44 crc kubenswrapper[4813]: E1007 20:41:44.620577 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:41:56 crc kubenswrapper[4813]: I1007 20:41:56.610302 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:41:56 crc kubenswrapper[4813]: E1007 20:41:56.611103 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:42:10 crc kubenswrapper[4813]: I1007 20:42:10.602681 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:42:10 crc kubenswrapper[4813]: E1007 20:42:10.603512 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:42:11 crc kubenswrapper[4813]: I1007 20:42:11.723086 4813 scope.go:117] "RemoveContainer" containerID="26e489c6191edc565985f538da9d781f32c6f53075084141f79b0769ea65053e"
Oct 07 20:42:23 crc kubenswrapper[4813]: I1007 20:42:23.603785 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:42:23 crc kubenswrapper[4813]: E1007 20:42:23.604736 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:42:37 crc kubenswrapper[4813]: I1007 20:42:37.602851 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:42:37 crc kubenswrapper[4813]: E1007 20:42:37.604000 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.978746 4813 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pq4wz"]
Oct 07 20:42:45 crc kubenswrapper[4813]: E1007 20:42:45.979889 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerName="gather"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.979910 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerName="gather"
Oct 07 20:42:45 crc kubenswrapper[4813]: E1007 20:42:45.979961 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="registry-server"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.979975 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="registry-server"
Oct 07 20:42:45 crc kubenswrapper[4813]: E1007 20:42:45.979995 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="extract-content"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.980006 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="extract-content"
Oct 07 20:42:45 crc kubenswrapper[4813]: E1007 20:42:45.980020 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="extract-utilities"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.980029 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="extract-utilities"
Oct 07 20:42:45 crc kubenswrapper[4813]: E1007 20:42:45.980063 4813 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerName="copy"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.980071 4813 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerName="copy"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.984306 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerName="copy"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.984350 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c031e0d-bebb-4af7-93db-f77c5567977c" containerName="registry-server"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.984400 4813 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b27f1b7-5f74-4e67-8a17-5ba84c954120" containerName="gather"
Oct 07 20:42:45 crc kubenswrapper[4813]: I1007 20:42:45.986069 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.025312 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pq4wz"]
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.108536 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-catalog-content\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.108646 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhmwt\" (UniqueName: \"kubernetes.io/projected/62f29c13-e7cb-48d9-b719-5219e53ed378-kube-api-access-xhmwt\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.108684 4813 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-utilities\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.210495 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhmwt\" (UniqueName: \"kubernetes.io/projected/62f29c13-e7cb-48d9-b719-5219e53ed378-kube-api-access-xhmwt\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.210550 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-utilities\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.210624 4813 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-catalog-content\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.211170 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-catalog-content\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.211247 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-utilities\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.241287 4813 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhmwt\" (UniqueName: \"kubernetes.io/projected/62f29c13-e7cb-48d9-b719-5219e53ed378-kube-api-access-xhmwt\") pod \"redhat-operators-pq4wz\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.368918 4813 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:46 crc kubenswrapper[4813]: I1007 20:42:46.970304 4813 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pq4wz"]
Oct 07 20:42:47 crc kubenswrapper[4813]: I1007 20:42:47.601195 4813 generic.go:334] "Generic (PLEG): container finished" podID="62f29c13-e7cb-48d9-b719-5219e53ed378" containerID="fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31" exitCode=0
Oct 07 20:42:47 crc kubenswrapper[4813]: I1007 20:42:47.601259 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pq4wz" event={"ID":"62f29c13-e7cb-48d9-b719-5219e53ed378","Type":"ContainerDied","Data":"fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31"}
Oct 07 20:42:47 crc kubenswrapper[4813]: I1007 20:42:47.601745 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pq4wz" event={"ID":"62f29c13-e7cb-48d9-b719-5219e53ed378","Type":"ContainerStarted","Data":"df80f08021b79ffe6f591665be1c952d4bb8603fcf27fd66d22f9685c0dce05a"}
Oct 07 20:42:47 crc kubenswrapper[4813]: I1007 20:42:47.604866 4813 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 07 20:42:49 crc kubenswrapper[4813]: I1007 20:42:49.625711 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pq4wz" event={"ID":"62f29c13-e7cb-48d9-b719-5219e53ed378","Type":"ContainerStarted","Data":"9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484"}
Oct 07 20:42:50 crc kubenswrapper[4813]: I1007 20:42:50.603112 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:42:50 crc kubenswrapper[4813]: E1007 20:42:50.603658 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:42:53 crc kubenswrapper[4813]: I1007 20:42:53.664285 4813 generic.go:334] "Generic (PLEG): container finished" podID="62f29c13-e7cb-48d9-b719-5219e53ed378" containerID="9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484" exitCode=0
Oct 07 20:42:53 crc kubenswrapper[4813]: I1007 20:42:53.664305 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pq4wz" event={"ID":"62f29c13-e7cb-48d9-b719-5219e53ed378","Type":"ContainerDied","Data":"9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484"}
Oct 07 20:42:54 crc kubenswrapper[4813]: I1007 20:42:54.676199 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pq4wz" event={"ID":"62f29c13-e7cb-48d9-b719-5219e53ed378","Type":"ContainerStarted","Data":"241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189"}
Oct 07 20:42:54 crc kubenswrapper[4813]: I1007 20:42:54.695046 4813 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pq4wz" podStartSLOduration=3.110064753 podStartE2EDuration="9.695031361s" podCreationTimestamp="2025-10-07 20:42:45 +0000 UTC" firstStartedPulling="2025-10-07 20:42:47.604489207 +0000 UTC m=+5093.682744828" lastFinishedPulling="2025-10-07 20:42:54.189455785 +0000 UTC m=+5100.267711436" observedRunningTime="2025-10-07 20:42:54.693563049 +0000 UTC m=+5100.771818660" watchObservedRunningTime="2025-10-07 20:42:54.695031361 +0000 UTC m=+5100.773286972"
Oct 07 20:42:56 crc kubenswrapper[4813]: I1007 20:42:56.369889 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:56 crc kubenswrapper[4813]: I1007 20:42:56.370396 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:42:57 crc kubenswrapper[4813]: I1007 20:42:57.416540 4813 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pq4wz" podUID="62f29c13-e7cb-48d9-b719-5219e53ed378" containerName="registry-server" probeResult="failure" output=<
Oct 07 20:42:57 crc kubenswrapper[4813]: timeout: failed to connect service ":50051" within 1s
Oct 07 20:42:57 crc kubenswrapper[4813]: >
Oct 07 20:43:02 crc kubenswrapper[4813]: I1007 20:43:02.603241 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3"
Oct 07 20:43:02 crc kubenswrapper[4813]: E1007 20:43:02.605852 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" pod="openshift-machine-config-operator/machine-config-daemon-gcfdf" podUID="537f8a53-dde4-4808-a822-9d8c922a8499"
Oct 07 20:43:06 crc kubenswrapper[4813]: I1007 20:43:06.427581 4813 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:43:06 crc kubenswrapper[4813]: I1007 20:43:06.500672 4813 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pq4wz"
Oct 07 20:43:06 crc kubenswrapper[4813]: I1007 20:43:06.670018 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pq4wz"]
Oct 07 20:43:07 crc kubenswrapper[4813]: I1007 20:43:07.800652 4813 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pq4wz" podUID="62f29c13-e7cb-48d9-b719-5219e53ed378" containerName="registry-server" containerID="cri-o://241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189" gracePeriod=2
Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.301701 4813 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-pq4wz" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.470300 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-utilities\") pod \"62f29c13-e7cb-48d9-b719-5219e53ed378\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.470789 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-catalog-content\") pod \"62f29c13-e7cb-48d9-b719-5219e53ed378\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.470840 4813 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhmwt\" (UniqueName: \"kubernetes.io/projected/62f29c13-e7cb-48d9-b719-5219e53ed378-kube-api-access-xhmwt\") pod \"62f29c13-e7cb-48d9-b719-5219e53ed378\" (UID: \"62f29c13-e7cb-48d9-b719-5219e53ed378\") " Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.471744 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-utilities" (OuterVolumeSpecName: "utilities") pod "62f29c13-e7cb-48d9-b719-5219e53ed378" (UID: "62f29c13-e7cb-48d9-b719-5219e53ed378"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.472992 4813 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-utilities\") on node \"crc\" DevicePath \"\"" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.479723 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62f29c13-e7cb-48d9-b719-5219e53ed378-kube-api-access-xhmwt" (OuterVolumeSpecName: "kube-api-access-xhmwt") pod "62f29c13-e7cb-48d9-b719-5219e53ed378" (UID: "62f29c13-e7cb-48d9-b719-5219e53ed378"). InnerVolumeSpecName "kube-api-access-xhmwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.547781 4813 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62f29c13-e7cb-48d9-b719-5219e53ed378" (UID: "62f29c13-e7cb-48d9-b719-5219e53ed378"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.574545 4813 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f29c13-e7cb-48d9-b719-5219e53ed378-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.574590 4813 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhmwt\" (UniqueName: \"kubernetes.io/projected/62f29c13-e7cb-48d9-b719-5219e53ed378-kube-api-access-xhmwt\") on node \"crc\" DevicePath \"\"" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.814819 4813 generic.go:334] "Generic (PLEG): container finished" podID="62f29c13-e7cb-48d9-b719-5219e53ed378" containerID="241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189" exitCode=0 Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.814892 4813 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pq4wz" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.814906 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pq4wz" event={"ID":"62f29c13-e7cb-48d9-b719-5219e53ed378","Type":"ContainerDied","Data":"241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189"} Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.815000 4813 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pq4wz" event={"ID":"62f29c13-e7cb-48d9-b719-5219e53ed378","Type":"ContainerDied","Data":"df80f08021b79ffe6f591665be1c952d4bb8603fcf27fd66d22f9685c0dce05a"} Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.815049 4813 scope.go:117] "RemoveContainer" containerID="241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.855459 4813 scope.go:117] "RemoveContainer" containerID="9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.857930 4813 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pq4wz"] Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.872635 4813 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pq4wz"] Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.882762 4813 scope.go:117] "RemoveContainer" containerID="fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.926317 4813 scope.go:117] "RemoveContainer" containerID="241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189" Oct 07 20:43:08 crc kubenswrapper[4813]: E1007 20:43:08.926974 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189\": container with ID starting with 241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189 not found: ID does not exist" containerID="241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.927037 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189"} err="failed to get container status \"241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189\": 
rpc error: code = NotFound desc = could not find container \"241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189\": container with ID starting with 241751f89babb92b36368cd5dfe4499ba31cb3bb8819d8ba972c4b16e53ca189 not found: ID does not exist" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.927064 4813 scope.go:117] "RemoveContainer" containerID="9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484" Oct 07 20:43:08 crc kubenswrapper[4813]: E1007 20:43:08.927482 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484\": container with ID starting with 9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484 not found: ID does not exist" containerID="9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.927536 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484"} err="failed to get container status \"9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484\": rpc error: code = NotFound desc = could not find container \"9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484\": container with ID starting with 9fa0084ffae879521962bfc655ba046bb51927275ee46643d400b7005675a484 not found: ID does not exist" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.927570 4813 scope.go:117] "RemoveContainer" containerID="fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31" Oct 07 20:43:08 crc kubenswrapper[4813]: E1007 20:43:08.928721 4813 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31\": container with ID starting with fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31 not found: ID does not exist" containerID="fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31" Oct 07 20:43:08 crc kubenswrapper[4813]: I1007 20:43:08.928755 4813 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31"} err="failed to get container status \"fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31\": rpc error: code = NotFound desc = could not find container \"fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31\": container with ID starting with fa2066b174886ecc3e89da2ee71081119b963a31c7224ca7873e75caaa15bb31 not found: ID does not exist" Oct 07 20:43:10 crc kubenswrapper[4813]: I1007 20:43:10.627971 4813 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62f29c13-e7cb-48d9-b719-5219e53ed378" path="/var/lib/kubelet/pods/62f29c13-e7cb-48d9-b719-5219e53ed378/volumes" Oct 07 20:43:16 crc kubenswrapper[4813]: I1007 20:43:16.602602 4813 scope.go:117] "RemoveContainer" containerID="bc1263d3d1a4130af46048aca028722ac4abc6b4140f60c5f3c1731fbd9350c3" Oct 07 20:43:16 crc kubenswrapper[4813]: E1007 20:43:16.603359 4813 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gcfdf_openshift-machine-config-operator(537f8a53-dde4-4808-a822-9d8c922a8499)\"" 